| field | dtype | string lengths / classes |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 234 |
| new_file | stringlengths | 4 to 234 |
| old_contents | stringlengths | 10 to 3.01k |
| new_contents | stringlengths | 19 to 3.38k |
| subject | stringlengths | 16 to 736 |
| message | stringlengths | 17 to 2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 to 82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134 to 4.41k |
| fuzzy_diff | stringlengths | 29 to 3.44k |
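Assuming this dump corresponds to a Hugging Face dataset with the schema above, a row could be loaded and its fields inspected roughly as sketched below; the repository id and config name are placeholders, not values taken from this preview.

```python
from datasets import load_dataset

# Hypothetical repository id and config name; substitute the real ones for this dataset.
ds = load_dataset("some-org/commit-rewrites", name="python", split="train")

row = ds[0]
print(row["commit"])                            # 40-character commit hash
print(row["old_file"], "->", row["new_file"])   # file path before and after the change
print(row["subject"])                           # one-line commit subject
print(row["content"][:200])                     # "## Code Before: ... ## Instruction: ... ## Code After: ..."
```

Each record below lists these fields in the order given in the table, one value per field.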
8a8d36d1f39cf893328b008cb11ef8e4a3fe71b5
txlege84/topics/management/commands/bootstraptopics.py
txlege84/topics/management/commands/bootstraptopics.py
from django.core.management.base import BaseCommand from topics.models import Topic class Command(BaseCommand): help = u'Bootstrap the topic lists in the database.' def handle(self, *args, **kwargs): self.load_topics() def load_topics(self): self.stdout.write(u'Loading hot list topics...') topics = [ u'Budget & Taxes', u'Criminal Justice', u'Energy', u'Environment', u'Ethics', u'Health & Human Services', u'Higher Education', u'Immigration & Border Security', u'Public Education', u'Social Justice', u'Transportation', ] for topic in topics: Topic.objects.get_or_create(name=topic)
from django.core.management.base import BaseCommand from topics.models import Topic class Command(BaseCommand): help = u'Bootstrap the topic lists in the database.' def handle(self, *args, **kwargs): self.load_topics() def load_topics(self): self.stdout.write(u'Loading hot list topics...') topics = [ u'Budget & Taxes', u'Energy', u'Environment', u'Ethics', u'Health & Human Services', u'Higher Education', u'Immigration & Border Security', u'Law & Order', u'Public Education', u'Social Justice', u'Transportation', ] for topic in topics: Topic.objects.get_or_create(name=topic)
Rename Criminal Justice to Law & Order, per Emily's request
Rename Criminal Justice to Law & Order, per Emily's request
Python
mit
texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84
python
## Code Before: from django.core.management.base import BaseCommand from topics.models import Topic class Command(BaseCommand): help = u'Bootstrap the topic lists in the database.' def handle(self, *args, **kwargs): self.load_topics() def load_topics(self): self.stdout.write(u'Loading hot list topics...') topics = [ u'Budget & Taxes', u'Criminal Justice', u'Energy', u'Environment', u'Ethics', u'Health & Human Services', u'Higher Education', u'Immigration & Border Security', u'Public Education', u'Social Justice', u'Transportation', ] for topic in topics: Topic.objects.get_or_create(name=topic) ## Instruction: Rename Criminal Justice to Law & Order, per Emily's request ## Code After: from django.core.management.base import BaseCommand from topics.models import Topic class Command(BaseCommand): help = u'Bootstrap the topic lists in the database.' def handle(self, *args, **kwargs): self.load_topics() def load_topics(self): self.stdout.write(u'Loading hot list topics...') topics = [ u'Budget & Taxes', u'Energy', u'Environment', u'Ethics', u'Health & Human Services', u'Higher Education', u'Immigration & Border Security', u'Law & Order', u'Public Education', u'Social Justice', u'Transportation', ] for topic in topics: Topic.objects.get_or_create(name=topic)
# ... existing code ... topics = [ u'Budget & Taxes', u'Energy', u'Environment', u'Ethics', # ... modified code ... u'Health & Human Services', u'Higher Education', u'Immigration & Border Security', u'Law & Order', u'Public Education', u'Social Justice', u'Transportation', # ... rest of the code ...
b1fa16fd4b4cc3b6983290fb38d0be54c2a21742
test_project/test_app/migrations/0002_initial_data.py
test_project/test_app/migrations/0002_initial_data.py
from __future__ import unicode_literals from django.core.management import call_command from django.db import migrations fixture = 'initial_data' def load_fixture(apps, schema_editor): call_command('loaddata', fixture, app_label='test_app') class Migration(migrations.Migration): dependencies = [ ('test_app', '0001_initial'), ] operations = [ migrations.RunPython(load_fixture), ]
from __future__ import unicode_literals from django.core.management import call_command from django.db import migrations fixture = 'initial_data' def load_fixture(apps, schema_editor): # StackOverflow says it is very wrong to loaddata here, we should get # "old" models and then load... but, this is only a simple test app # so whatever. Just don't use loaddata command in your migrations or # don't be suprised when it stops working... without understanding why. call_command('loaddata', fixture, app_label='test_app') class Migration(migrations.Migration): dependencies = [ ('test_app', '0001_initial'), ] operations = [ migrations.RunPython(load_fixture), ]
Add comment about how bad this is
Add comment about how bad this is
Python
mit
mpasternak/django-multiseek,mpasternak/django-multiseek,mpasternak/django-multiseek,mpasternak/django-multiseek
python
## Code Before: from __future__ import unicode_literals from django.core.management import call_command from django.db import migrations fixture = 'initial_data' def load_fixture(apps, schema_editor): call_command('loaddata', fixture, app_label='test_app') class Migration(migrations.Migration): dependencies = [ ('test_app', '0001_initial'), ] operations = [ migrations.RunPython(load_fixture), ] ## Instruction: Add comment about how bad this is ## Code After: from __future__ import unicode_literals from django.core.management import call_command from django.db import migrations fixture = 'initial_data' def load_fixture(apps, schema_editor): # StackOverflow says it is very wrong to loaddata here, we should get # "old" models and then load... but, this is only a simple test app # so whatever. Just don't use loaddata command in your migrations or # don't be suprised when it stops working... without understanding why. call_command('loaddata', fixture, app_label='test_app') class Migration(migrations.Migration): dependencies = [ ('test_app', '0001_initial'), ] operations = [ migrations.RunPython(load_fixture), ]
// ... existing code ... def load_fixture(apps, schema_editor): # StackOverflow says it is very wrong to loaddata here, we should get # "old" models and then load... but, this is only a simple test app # so whatever. Just don't use loaddata command in your migrations or # don't be suprised when it stops working... without understanding why. call_command('loaddata', fixture, app_label='test_app') // ... rest of the code ...
d4e5af537be36bd50405e60fdb46f31b88537916
src/commoner_i/views.py
src/commoner_i/views.py
from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse, Http404 def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.free: # return a 404 for FREE profiles raise Http404 if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
Raise a 404 when for FREE profile badge requests
Raise a 404 when for FREE profile badge requests
Python
agpl-3.0
cc-archive/commoner,cc-archive/commoner
python
## Code Before: from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png') ## Instruction: Raise a 404 when for FREE profile badge requests ## Code After: from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse, Http404 def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.free: # return a 404 for FREE profiles raise Http404 if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
# ... existing code ... from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse, Http404 def badge(request, username, size=''): # ... modified code ... profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.free: # return a 404 for FREE profiles raise Http404 if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), # ... rest of the code ...
4636c9394138534fc39cc5bdac373b97919ffd01
server/info/services.py
server/info/services.py
"""info services.""" from info.models import Article, News, Column def get_column_object(uid): """Get column object.""" try: obj = Column.objects.get(uid=uid) except Column.DoesNotExist: obj = None return obj def get_articles_by_column(uid): """Get_articles_by_column.""" queryset = Article.objects.filter(column__uid=uid).order_by('id') return queryset def get_columns_queryset(): """Get_columns_queryset.""" queryset = Column.objects.all().order_by('-id') return queryset def get_article_queryset(): """Get article queryset.""" queryset = Article.objects.all().order_by('-id') return queryset def get_article_object(uid): """Get article object.""" return Article.objects.get(uid=uid) def get_news_queryset(): """Get news queryset.""" return News.objects.all().order_by('-id')
"""info services.""" from info.models import Article, News, Column def get_column_object(uid): """Get column object.""" try: obj = Column.objects.get(uid=uid) except Column.DoesNotExist: obj = None return obj def get_articles_by_column(uid): """Get_articles_by_column.""" queryset = Article.objects.filter( column__uid=uid ).order_by('id') return queryset def get_columns_queryset(): """Get_columns_queryset.""" queryset = Column.objects.all().only('uid', 'name').order_by('-id') return queryset def get_article_queryset(): """Get article queryset.""" queryset = Article.objects.all().order_by('-id') return queryset def get_article_object(uid): """Get article object.""" return Article.objects.get(uid=uid) def get_news_queryset(): """Get news queryset.""" return News.objects.all().order_by('-id')
Modify django orm filter, add only
Modify django orm filter, add only
Python
mit
istommao/codingcatweb,istommao/codingcatweb,istommao/codingcatweb
python
## Code Before: """info services.""" from info.models import Article, News, Column def get_column_object(uid): """Get column object.""" try: obj = Column.objects.get(uid=uid) except Column.DoesNotExist: obj = None return obj def get_articles_by_column(uid): """Get_articles_by_column.""" queryset = Article.objects.filter(column__uid=uid).order_by('id') return queryset def get_columns_queryset(): """Get_columns_queryset.""" queryset = Column.objects.all().order_by('-id') return queryset def get_article_queryset(): """Get article queryset.""" queryset = Article.objects.all().order_by('-id') return queryset def get_article_object(uid): """Get article object.""" return Article.objects.get(uid=uid) def get_news_queryset(): """Get news queryset.""" return News.objects.all().order_by('-id') ## Instruction: Modify django orm filter, add only ## Code After: """info services.""" from info.models import Article, News, Column def get_column_object(uid): """Get column object.""" try: obj = Column.objects.get(uid=uid) except Column.DoesNotExist: obj = None return obj def get_articles_by_column(uid): """Get_articles_by_column.""" queryset = Article.objects.filter( column__uid=uid ).order_by('id') return queryset def get_columns_queryset(): """Get_columns_queryset.""" queryset = Column.objects.all().only('uid', 'name').order_by('-id') return queryset def get_article_queryset(): """Get article queryset.""" queryset = Article.objects.all().order_by('-id') return queryset def get_article_object(uid): """Get article object.""" return Article.objects.get(uid=uid) def get_news_queryset(): """Get news queryset.""" return News.objects.all().order_by('-id')
# ... existing code ... def get_articles_by_column(uid): """Get_articles_by_column.""" queryset = Article.objects.filter( column__uid=uid ).order_by('id') return queryset # ... modified code ... def get_columns_queryset(): """Get_columns_queryset.""" queryset = Column.objects.all().only('uid', 'name').order_by('-id') return queryset # ... rest of the code ...
f18cf3c17e450eb6f8db5288ecf146eff0968a47
xmt/select.py
xmt/select.py
from itertools import groupby from nltk.translate.gleu_score import sentence_gleu as gleu from nltk.tokenize.toktok import ToktokTokenizer _tokenize = ToktokTokenizer().tokenize def select_first(p): """ Return (hypothesis, reference) translation pairs using the first realization result per item. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): row = next(group) pairs.append((row['g-result:surface'], row['item:i-translation'])) return pairs def select_oracle(p): """ Return (hypothesis, reference) translation pairs using the realization result per item with the highest GLEU score. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): hrs = ((r['g-result:surface'], r['item:i-translation']) for r in group) ranked = [(gleu(_tokenize(r), _tokenize(h)), h, r) for h, r in hrs] _, hyp, ref = sorted(ranked, key=lambda r: r[0])[-1] pairs.append((hyp, ref)) return pairs
from itertools import groupby from nltk.translate import bleu_score from nltk.tokenize.toktok import ToktokTokenizer _tokenize = ToktokTokenizer().tokenize _smoother = bleu_score.SmoothingFunction().method3 bleu = bleu_score.sentence_bleu def select_first(p): """ Return (hypothesis, reference) translation pairs using the first realization result per item. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): row = next(group) pairs.append((row['g-result:surface'], row['item:i-translation'])) return pairs def select_oracle(p): """ Return (hypothesis, reference) translation pairs using the realization result per item with the highest GLEU score. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): scored = [] for res in group: ref = res['item:i-translation'] hyp = res['g-result:surface'] scored.append( (bleu([_tokenize(ref)], _tokenize(hyp), smoothing_function=_smoother), hyp, ref) ) _, hyp, ref = sorted(scored, key=lambda r: r[0])[-1] pairs.append((hyp, ref)) return pairs
Use NIST-BLEU instead of GLEU for oracle.
Use NIST-BLEU instead of GLEU for oracle.
Python
mit
goodmami/xmt,goodmami/xmt
python
## Code Before: from itertools import groupby from nltk.translate.gleu_score import sentence_gleu as gleu from nltk.tokenize.toktok import ToktokTokenizer _tokenize = ToktokTokenizer().tokenize def select_first(p): """ Return (hypothesis, reference) translation pairs using the first realization result per item. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): row = next(group) pairs.append((row['g-result:surface'], row['item:i-translation'])) return pairs def select_oracle(p): """ Return (hypothesis, reference) translation pairs using the realization result per item with the highest GLEU score. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): hrs = ((r['g-result:surface'], r['item:i-translation']) for r in group) ranked = [(gleu(_tokenize(r), _tokenize(h)), h, r) for h, r in hrs] _, hyp, ref = sorted(ranked, key=lambda r: r[0])[-1] pairs.append((hyp, ref)) return pairs ## Instruction: Use NIST-BLEU instead of GLEU for oracle. ## Code After: from itertools import groupby from nltk.translate import bleu_score from nltk.tokenize.toktok import ToktokTokenizer _tokenize = ToktokTokenizer().tokenize _smoother = bleu_score.SmoothingFunction().method3 bleu = bleu_score.sentence_bleu def select_first(p): """ Return (hypothesis, reference) translation pairs using the first realization result per item. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): row = next(group) pairs.append((row['g-result:surface'], row['item:i-translation'])) return pairs def select_oracle(p): """ Return (hypothesis, reference) translation pairs using the realization result per item with the highest GLEU score. """ pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): scored = [] for res in group: ref = res['item:i-translation'] hyp = res['g-result:surface'] scored.append( (bleu([_tokenize(ref)], _tokenize(hyp), smoothing_function=_smoother), hyp, ref) ) _, hyp, ref = sorted(scored, key=lambda r: r[0])[-1] pairs.append((hyp, ref)) return pairs
// ... existing code ... from itertools import groupby from nltk.translate import bleu_score from nltk.tokenize.toktok import ToktokTokenizer _tokenize = ToktokTokenizer().tokenize _smoother = bleu_score.SmoothingFunction().method3 bleu = bleu_score.sentence_bleu def select_first(p): """ // ... modified code ... pairs = [] rows = p.join('item', 'g-result') for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']): scored = [] for res in group: ref = res['item:i-translation'] hyp = res['g-result:surface'] scored.append( (bleu([_tokenize(ref)], _tokenize(hyp), smoothing_function=_smoother), hyp, ref) ) _, hyp, ref = sorted(scored, key=lambda r: r[0])[-1] pairs.append((hyp, ref)) return pairs // ... rest of the code ...
fdf33278f66028a932dbecb999f66445ab0a3cd1
shuup/admin/modules/product_types/views/edit.py
shuup/admin/modules/product_types/views/edit.py
from __future__ import unicode_literals from django import forms from shuup.admin.utils.views import CreateOrUpdateView from shuup.core.models import ProductType from shuup.utils.multilanguage_model_form import MultiLanguageModelForm class ProductTypeForm(MultiLanguageModelForm): class Meta: model = ProductType exclude = () # All the fields! widgets = { "attributes": forms.CheckboxSelectMultiple } class ProductTypeEditView(CreateOrUpdateView): model = ProductType form_class = ProductTypeForm template_name = "shuup/admin/product_types/edit.jinja" context_object_name = "product_type"
from __future__ import unicode_literals from shuup.admin.forms.fields import Select2MultipleField from shuup.admin.utils.views import CreateOrUpdateView from shuup.core.models import Attribute, ProductType from shuup.utils.multilanguage_model_form import MultiLanguageModelForm class ProductTypeForm(MultiLanguageModelForm): attributes = Select2MultipleField(model=Attribute, required=False) class Meta: model = ProductType exclude = () def __init__(self, **kwargs): super(ProductTypeForm, self).__init__(**kwargs) if self.instance.pk: choices = [(a.pk, a.name) for a in self.instance.attributes.all()] self.fields["attributes"].widget.choices = choices self.fields["attributes"].initial = [pk for pk, name in choices] def clean_attributes(self): attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])] return Attribute.objects.filter(pk__in=attributes).all() def save(self, commit=True): obj = super(ProductTypeForm, self).save(commit=commit) obj.attributes.clear() obj.attributes = self.cleaned_data["attributes"] return self.instance class ProductTypeEditView(CreateOrUpdateView): model = ProductType form_class = ProductTypeForm template_name = "shuup/admin/product_types/edit.jinja" context_object_name = "product_type"
Use Select2 in attribute selection
Use Select2 in attribute selection With large amounts of attributes product type creation was really slow Refs SH-73
Python
agpl-3.0
shawnadelic/shuup,suutari-ai/shoop,shoopio/shoop,suutari-ai/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,suutari/shoop,suutari/shoop,hrayr-artunyan/shuup,suutari/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,shoopio/shoop,shawnadelic/shuup,shoopio/shoop
python
## Code Before: from __future__ import unicode_literals from django import forms from shuup.admin.utils.views import CreateOrUpdateView from shuup.core.models import ProductType from shuup.utils.multilanguage_model_form import MultiLanguageModelForm class ProductTypeForm(MultiLanguageModelForm): class Meta: model = ProductType exclude = () # All the fields! widgets = { "attributes": forms.CheckboxSelectMultiple } class ProductTypeEditView(CreateOrUpdateView): model = ProductType form_class = ProductTypeForm template_name = "shuup/admin/product_types/edit.jinja" context_object_name = "product_type" ## Instruction: Use Select2 in attribute selection With large amounts of attributes product type creation was really slow Refs SH-73 ## Code After: from __future__ import unicode_literals from shuup.admin.forms.fields import Select2MultipleField from shuup.admin.utils.views import CreateOrUpdateView from shuup.core.models import Attribute, ProductType from shuup.utils.multilanguage_model_form import MultiLanguageModelForm class ProductTypeForm(MultiLanguageModelForm): attributes = Select2MultipleField(model=Attribute, required=False) class Meta: model = ProductType exclude = () def __init__(self, **kwargs): super(ProductTypeForm, self).__init__(**kwargs) if self.instance.pk: choices = [(a.pk, a.name) for a in self.instance.attributes.all()] self.fields["attributes"].widget.choices = choices self.fields["attributes"].initial = [pk for pk, name in choices] def clean_attributes(self): attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])] return Attribute.objects.filter(pk__in=attributes).all() def save(self, commit=True): obj = super(ProductTypeForm, self).save(commit=commit) obj.attributes.clear() obj.attributes = self.cleaned_data["attributes"] return self.instance class ProductTypeEditView(CreateOrUpdateView): model = ProductType form_class = ProductTypeForm template_name = "shuup/admin/product_types/edit.jinja" context_object_name = "product_type"
... from __future__ import unicode_literals from shuup.admin.forms.fields import Select2MultipleField from shuup.admin.utils.views import CreateOrUpdateView from shuup.core.models import Attribute, ProductType from shuup.utils.multilanguage_model_form import MultiLanguageModelForm class ProductTypeForm(MultiLanguageModelForm): attributes = Select2MultipleField(model=Attribute, required=False) class Meta: model = ProductType exclude = () def __init__(self, **kwargs): super(ProductTypeForm, self).__init__(**kwargs) if self.instance.pk: choices = [(a.pk, a.name) for a in self.instance.attributes.all()] self.fields["attributes"].widget.choices = choices self.fields["attributes"].initial = [pk for pk, name in choices] def clean_attributes(self): attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])] return Attribute.objects.filter(pk__in=attributes).all() def save(self, commit=True): obj = super(ProductTypeForm, self).save(commit=commit) obj.attributes.clear() obj.attributes = self.cleaned_data["attributes"] return self.instance class ProductTypeEditView(CreateOrUpdateView): ...
eef768a538c82629073b360618d8b39bcbf4c474
tests/dojo_test.py
tests/dojo_test.py
import unittest from src.dojo import Dojo class TestCreateRoom (unittest.TestCase): def setUp(self): self.dojo = Dojo() self.test_office = self.dojo.create_room("office", "test") self.test_living_space = self.dojo.create_room("living_space", "test living space") def test_create_room_successfully(self): initial_room_count = len(self.dojo.all_rooms) blue_office = self.dojo.create_room("office", "Blue") self.assertTrue(blue_office) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 1) def test_create_rooms_successfully(self): initial_room_count = len(self.dojo.all_rooms) offices = self.dojo.create_room("office", "Blue", "Black", "Brown") self.assertTrue(offices) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 3) def test_addition_of_duplicate_room_names(self): pass
import unittest from src.dojo import Dojo class TestCreateRoom (unittest.TestCase): def setUp(self): self.dojo = Dojo() self.test_office = self.dojo.create_room("office", "test") self.test_living_space = self.dojo.create_room("living_space", "test living space") def test_create_room_successfully(self): initial_room_count = len(self.dojo.all_rooms) blue_office = self.dojo.create_room("office", "Blue") self.assertTrue(blue_office) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 1) def test_create_rooms_successfully(self): initial_room_count = len(self.dojo.all_rooms) offices = self.dojo.create_room("office", "Blue", "Black", "Brown") self.assertTrue(offices) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 3) def test_addition_of_duplicate_room_names(self): initial_room_count = len(self.dojo.all_people) room1 = self.dojo.create_room("office", "Blue") room1 = self.dojo.create_room("office", "Blue") new_room_count = len(self.dojo.all_people) self.assertEqual(new_room_count - initial_room_count, 0)
Implement test for duplicate rooms
Implement test for duplicate rooms
Python
mit
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
python
## Code Before: import unittest from src.dojo import Dojo class TestCreateRoom (unittest.TestCase): def setUp(self): self.dojo = Dojo() self.test_office = self.dojo.create_room("office", "test") self.test_living_space = self.dojo.create_room("living_space", "test living space") def test_create_room_successfully(self): initial_room_count = len(self.dojo.all_rooms) blue_office = self.dojo.create_room("office", "Blue") self.assertTrue(blue_office) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 1) def test_create_rooms_successfully(self): initial_room_count = len(self.dojo.all_rooms) offices = self.dojo.create_room("office", "Blue", "Black", "Brown") self.assertTrue(offices) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 3) def test_addition_of_duplicate_room_names(self): pass ## Instruction: Implement test for duplicate rooms ## Code After: import unittest from src.dojo import Dojo class TestCreateRoom (unittest.TestCase): def setUp(self): self.dojo = Dojo() self.test_office = self.dojo.create_room("office", "test") self.test_living_space = self.dojo.create_room("living_space", "test living space") def test_create_room_successfully(self): initial_room_count = len(self.dojo.all_rooms) blue_office = self.dojo.create_room("office", "Blue") self.assertTrue(blue_office) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 1) def test_create_rooms_successfully(self): initial_room_count = len(self.dojo.all_rooms) offices = self.dojo.create_room("office", "Blue", "Black", "Brown") self.assertTrue(offices) new_room_count = len(self.dojo.all_rooms) self.assertEqual(new_room_count - initial_room_count, 3) def test_addition_of_duplicate_room_names(self): initial_room_count = len(self.dojo.all_people) room1 = self.dojo.create_room("office", "Blue") room1 = self.dojo.create_room("office", "Blue") new_room_count = len(self.dojo.all_people) self.assertEqual(new_room_count - initial_room_count, 0)
... self.assertEqual(new_room_count - initial_room_count, 3) def test_addition_of_duplicate_room_names(self): initial_room_count = len(self.dojo.all_people) room1 = self.dojo.create_room("office", "Blue") room1 = self.dojo.create_room("office", "Blue") new_room_count = len(self.dojo.all_people) self.assertEqual(new_room_count - initial_room_count, 0) ...
546855166b3f1ebda104774feb48087eb34467fa
src/test/java/fi/helsinki/cs/tmc/cli/io/ColorTest.java
src/test/java/fi/helsinki/cs/tmc/cli/io/ColorTest.java
package fi.helsinki.cs.tmc.cli.io; import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; public class ColorTest { private Boolean noColor; @Before public void setup() { this.noColor = EnvironmentUtil.isWindows(); } @Test public void colorsWork() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); if (!noColor) { assertEquals("\u001B[30mfoobar\u001B[0m", string); } else { assertEquals("foobar", string); } } @Test public void noColorWorks() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_NONE); assertEquals("foobar", string); } }
package fi.helsinki.cs.tmc.cli.io; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @PrepareForTest(EnvironmentUtil.class) public class ColorTest { @Before public void setup() { PowerMockito.mockStatic(EnvironmentUtil.class); } @Test public void colorsWorkInNonWindows() { when(EnvironmentUtil.isWindows()).thenReturn(false); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("\u001B[30mfoobar\u001B[0m", string); } @Test public void colorsWorkInWindows() { when(EnvironmentUtil.isWindows()).thenReturn(true); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("foobar", string); } @Test public void noColorWorks() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_NONE); assertEquals("foobar", string); } }
Make the color tests OS agnostic.
Make the color tests OS agnostic.
Java
mit
tmc-cli/tmc-cli,tmc-cli/tmc-cli,testmycode/tmc-cli,testmycode/tmc-cli
java
## Code Before: package fi.helsinki.cs.tmc.cli.io; import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; public class ColorTest { private Boolean noColor; @Before public void setup() { this.noColor = EnvironmentUtil.isWindows(); } @Test public void colorsWork() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); if (!noColor) { assertEquals("\u001B[30mfoobar\u001B[0m", string); } else { assertEquals("foobar", string); } } @Test public void noColorWorks() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_NONE); assertEquals("foobar", string); } } ## Instruction: Make the color tests OS agnostic. ## Code After: package fi.helsinki.cs.tmc.cli.io; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @PrepareForTest(EnvironmentUtil.class) public class ColorTest { @Before public void setup() { PowerMockito.mockStatic(EnvironmentUtil.class); } @Test public void colorsWorkInNonWindows() { when(EnvironmentUtil.isWindows()).thenReturn(false); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("\u001B[30mfoobar\u001B[0m", string); } @Test public void colorsWorkInWindows() { when(EnvironmentUtil.isWindows()).thenReturn(true); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("foobar", string); } @Test public void noColorWorks() { String string = Color.colorString("foobar", Color.AnsiColor.ANSI_NONE); assertEquals("foobar", string); } }
... package fi.helsinki.cs.tmc.cli.io; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.when; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @PrepareForTest(EnvironmentUtil.class) public class ColorTest { @Before public void setup() { PowerMockito.mockStatic(EnvironmentUtil.class); } @Test public void colorsWorkInNonWindows() { when(EnvironmentUtil.isWindows()).thenReturn(false); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("\u001B[30mfoobar\u001B[0m", string); } @Test public void colorsWorkInWindows() { when(EnvironmentUtil.isWindows()).thenReturn(true); String string = Color.colorString("foobar", Color.AnsiColor.ANSI_BLACK); assertEquals("foobar", string); } @Test ...
d30062a1f0b865de646b0f2f8d9d176ca9b92769
setup.py
setup.py
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='[email protected]', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='[email protected]', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
Fix issue with path variable
Fix issue with path variable
Python
apache-2.0
odin-public/osaAPI
python
## Code Before: import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.join(__path__, 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version_getter=version, author='apsliteteam, oznu', author_email='[email protected]', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), ) ## Instruction: Fix issue with path variable ## Code After: import os from setuptools import setup PACKAGE_VERSION = '0.3' def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER'))) with version_file() as verfile: data = verfile.readlines() return data[0].strip() setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='[email protected]', packages=['osaapi'], url='https://aps.odin.com', license='Apache License', description='A python client for the Odin Service Automation (OSA) and billing APIs.', long_description=open('README.md').read(), )
// ... existing code ... def version(): def version_file(mode='r'): return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode) if os.getenv('TRAVIS'): with version_file('w') as verfile: // ... modified code ... setup( name='osaapi', version=version(), author='apsliteteam, oznu', author_email='[email protected]', packages=['osaapi'], // ... rest of the code ...
bb0b72333b715956740373c3ba80a8193b99a8cc
app/services/updater_service.py
app/services/updater_service.py
from app.system.updater import check_updates, do_upgrade, run_ansible from app.views import SimpleBackgroundView from .base import BaseService, BlockingServiceStart class UpdaterService(BaseService, BlockingServiceStart): def __init__(self, observer=None): super().__init__(observer=observer) self._view = SimpleBackgroundView("Checking for updates.") def on_service_start(self): values = check_updates() for val in values: self._view.args["subtitle"] = "Working with: " + str(val) do_upgrade([val]) if values: run_ansible() def view(self): return self._view
from app.system.updater import check_updates, do_upgrade, run_ansible from app.views import SimpleBackgroundView from .base import BaseService, BlockingServiceStart class UpdaterService(BaseService, BlockingServiceStart): def __init__(self, observer=None): super().__init__(observer=observer) self._view = SimpleBackgroundView("Checking for updates.") def on_service_start(self): values = check_updates() for val in values: self._view.args["subtitle"] = "Working with: " + str(val) do_upgrade([val]) if values: self._view.args["subtitle"] = "Finishing up..." run_ansible() def view(self): return self._view
Add message before running ansible.
Add message before running ansible.
Python
mit
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
python
## Code Before: from app.system.updater import check_updates, do_upgrade, run_ansible from app.views import SimpleBackgroundView from .base import BaseService, BlockingServiceStart class UpdaterService(BaseService, BlockingServiceStart): def __init__(self, observer=None): super().__init__(observer=observer) self._view = SimpleBackgroundView("Checking for updates.") def on_service_start(self): values = check_updates() for val in values: self._view.args["subtitle"] = "Working with: " + str(val) do_upgrade([val]) if values: run_ansible() def view(self): return self._view ## Instruction: Add message before running ansible. ## Code After: from app.system.updater import check_updates, do_upgrade, run_ansible from app.views import SimpleBackgroundView from .base import BaseService, BlockingServiceStart class UpdaterService(BaseService, BlockingServiceStart): def __init__(self, observer=None): super().__init__(observer=observer) self._view = SimpleBackgroundView("Checking for updates.") def on_service_start(self): values = check_updates() for val in values: self._view.args["subtitle"] = "Working with: " + str(val) do_upgrade([val]) if values: self._view.args["subtitle"] = "Finishing up..." run_ansible() def view(self): return self._view
// ... existing code ... self._view.args["subtitle"] = "Working with: " + str(val) do_upgrade([val]) if values: self._view.args["subtitle"] = "Finishing up..." run_ansible() def view(self): // ... rest of the code ...
7dd92f81ba0bf43973da733a70d244ce321108a5
subprojects/core/src/main/java/org/gradle/api/internal/changedetection/state/DefaultNormalizedFileSnapshot.java
subprojects/core/src/main/java/org/gradle/api/internal/changedetection/state/DefaultNormalizedFileSnapshot.java
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.changedetection.state; import org.gradle.api.internal.changedetection.state.mirror.PhysicalSnapshot; import org.gradle.internal.file.FileType; import org.gradle.internal.hash.HashCode; public class DefaultNormalizedFileSnapshot extends AbstractNormalizedFileSnapshot { private final String normalizedPath; public DefaultNormalizedFileSnapshot(String normalizedPath, FileType type, HashCode contentHash) { super(type, contentHash); this.normalizedPath = normalizedPath; } public DefaultNormalizedFileSnapshot(String normalizedPath, PhysicalSnapshot snapshot) { this(normalizedPath, snapshot.getType(), snapshot.getType() == FileType.Directory ? DIR_SIGNATURE : snapshot.getHash()); } @Override public String getNormalizedPath() { return normalizedPath; } }
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.changedetection.state; import org.gradle.api.internal.changedetection.state.mirror.PhysicalSnapshot; import org.gradle.internal.file.FileType; import org.gradle.internal.hash.HashCode; public class DefaultNormalizedFileSnapshot extends AbstractNormalizedFileSnapshot { private final String normalizedPath; public DefaultNormalizedFileSnapshot(String normalizedPath, FileType type, HashCode contentHash) { super(type, hashForType(type, contentHash)); this.normalizedPath = normalizedPath; } public DefaultNormalizedFileSnapshot(String normalizedPath, PhysicalSnapshot snapshot) { this(normalizedPath, snapshot.getType(), snapshot.getHash()); } private static HashCode hashForType(FileType fileType, HashCode hash) { switch (fileType) { case Directory: return DIR_SIGNATURE; case Missing: return MISSING_FILE_SIGNATURE; case RegularFile: return hash; default: throw new IllegalStateException("Unknown file type: " + fileType); } } @Override public String getNormalizedPath() { return normalizedPath; } }
Use correct hash for missing files
Use correct hash for missing files
Java
apache-2.0
robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,lsmaira/gradle
java
## Code Before: /* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.changedetection.state; import org.gradle.api.internal.changedetection.state.mirror.PhysicalSnapshot; import org.gradle.internal.file.FileType; import org.gradle.internal.hash.HashCode; public class DefaultNormalizedFileSnapshot extends AbstractNormalizedFileSnapshot { private final String normalizedPath; public DefaultNormalizedFileSnapshot(String normalizedPath, FileType type, HashCode contentHash) { super(type, contentHash); this.normalizedPath = normalizedPath; } public DefaultNormalizedFileSnapshot(String normalizedPath, PhysicalSnapshot snapshot) { this(normalizedPath, snapshot.getType(), snapshot.getType() == FileType.Directory ? DIR_SIGNATURE : snapshot.getHash()); } @Override public String getNormalizedPath() { return normalizedPath; } } ## Instruction: Use correct hash for missing files ## Code After: /* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.changedetection.state; import org.gradle.api.internal.changedetection.state.mirror.PhysicalSnapshot; import org.gradle.internal.file.FileType; import org.gradle.internal.hash.HashCode; public class DefaultNormalizedFileSnapshot extends AbstractNormalizedFileSnapshot { private final String normalizedPath; public DefaultNormalizedFileSnapshot(String normalizedPath, FileType type, HashCode contentHash) { super(type, hashForType(type, contentHash)); this.normalizedPath = normalizedPath; } public DefaultNormalizedFileSnapshot(String normalizedPath, PhysicalSnapshot snapshot) { this(normalizedPath, snapshot.getType(), snapshot.getHash()); } private static HashCode hashForType(FileType fileType, HashCode hash) { switch (fileType) { case Directory: return DIR_SIGNATURE; case Missing: return MISSING_FILE_SIGNATURE; case RegularFile: return hash; default: throw new IllegalStateException("Unknown file type: " + fileType); } } @Override public String getNormalizedPath() { return normalizedPath; } }
// ... existing code ... private final String normalizedPath; public DefaultNormalizedFileSnapshot(String normalizedPath, FileType type, HashCode contentHash) { super(type, hashForType(type, contentHash)); this.normalizedPath = normalizedPath; } public DefaultNormalizedFileSnapshot(String normalizedPath, PhysicalSnapshot snapshot) { this(normalizedPath, snapshot.getType(), snapshot.getHash()); } private static HashCode hashForType(FileType fileType, HashCode hash) { switch (fileType) { case Directory: return DIR_SIGNATURE; case Missing: return MISSING_FILE_SIGNATURE; case RegularFile: return hash; default: throw new IllegalStateException("Unknown file type: " + fileType); } } @Override // ... rest of the code ...
ddb70c43c0b63cb5af74fb059975cac17bf9f7b9
mdot_rest/views.py
mdot_rest/views.py
from django.shortcuts import render from .models import Resource from .serializers import ResourceSerializer from rest_framework import generics class ResourceList(generics.ListCreateAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer class ResourceDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer
from django.shortcuts import render from .models import Resource from .serializers import ResourceSerializer from rest_framework import generics, permissions class ResourceList(generics.ListCreateAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) class ResourceDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
Make the API read only unless authenticated.
Make the API read only unless authenticated.
Python
apache-2.0
uw-it-aca/mdot-rest,uw-it-aca/mdot-rest
python
## Code Before: from django.shortcuts import render from .models import Resource from .serializers import ResourceSerializer from rest_framework import generics class ResourceList(generics.ListCreateAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer class ResourceDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer ## Instruction: Make the API read only unless authenticated. ## Code After: from django.shortcuts import render from .models import Resource from .serializers import ResourceSerializer from rest_framework import generics, permissions class ResourceList(generics.ListCreateAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) class ResourceDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
... from django.shortcuts import render from .models import Resource from .serializers import ResourceSerializer from rest_framework import generics, permissions class ResourceList(generics.ListCreateAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) class ResourceDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Resource.objects.all() serializer_class = ResourceSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) ...
4ff1eb00f8e212d280ac858feb4efcc795d97d80
tests/test_models.py
tests/test_models.py
import pytest from suddendev.models import GameController def test_create_game(session): pass
import pytest from suddendev.models import GameSetup def test_create_game(session): game_setup = GameSetup('ASDF') assert game_setup.player_count == 1
Fix broken import in model tests.
[NG] Fix broken import in model tests.
Python
mit
SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev
python
## Code Before: import pytest from suddendev.models import GameController def test_create_game(session): pass ## Instruction: [NG] Fix broken import in model tests. ## Code After: import pytest from suddendev.models import GameSetup def test_create_game(session): game_setup = GameSetup('ASDF') assert game_setup.player_count == 1
// ... existing code ... import pytest from suddendev.models import GameSetup def test_create_game(session): game_setup = GameSetup('ASDF') assert game_setup.player_count == 1 // ... rest of the code ...
b2d654cf2af71b608d81c6501b214a9b330e1ffe
battlenet/utils.py
battlenet/utils.py
import unicodedata import urllib def normalize(name): if not isinstance(name, unicode): name = name.decode('utf-8') return unicodedata.normalize('NFKC', name.replace("'", '')).encode('utf-8') def quote(name): if isinstance(name, unicode): name = normalize(name).encode('utf8') return urllib.quote(name) def make_icon_url(region, icon, size='large'): if not icon: return '' if size == 'small': size = 18 else: size = 56 return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon) def make_connection(): if not hasattr(make_connection, 'Connection'): from .connection import Connection make_connection.Connection = Connection return make_connection.Connection()
import unicodedata import urllib def normalize(name): if not isinstance(name, unicode): name = name.decode('utf-8') return unicodedata.normalize('NFKC', name.replace("'", '')).encode('utf-8') def quote(name): if isinstance(name, unicode): name = normalize(name) return urllib.quote(name) def make_icon_url(region, icon, size='large'): if not icon: return '' if size == 'small': size = 18 else: size = 56 return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon) def make_connection(): if not hasattr(make_connection, 'Connection'): from .connection import Connection make_connection.Connection = Connection return make_connection.Connection()
Normalize already returns encoded value.
Normalize already returns encoded value.
Python
mit
PuckCh/battlenet,vishnevskiy/battlenet
python
## Code Before: import unicodedata import urllib def normalize(name): if not isinstance(name, unicode): name = name.decode('utf-8') return unicodedata.normalize('NFKC', name.replace("'", '')).encode('utf-8') def quote(name): if isinstance(name, unicode): name = normalize(name).encode('utf8') return urllib.quote(name) def make_icon_url(region, icon, size='large'): if not icon: return '' if size == 'small': size = 18 else: size = 56 return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon) def make_connection(): if not hasattr(make_connection, 'Connection'): from .connection import Connection make_connection.Connection = Connection return make_connection.Connection() ## Instruction: Normalize already returns encoded value. ## Code After: import unicodedata import urllib def normalize(name): if not isinstance(name, unicode): name = name.decode('utf-8') return unicodedata.normalize('NFKC', name.replace("'", '')).encode('utf-8') def quote(name): if isinstance(name, unicode): name = normalize(name) return urllib.quote(name) def make_icon_url(region, icon, size='large'): if not icon: return '' if size == 'small': size = 18 else: size = 56 return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon) def make_connection(): if not hasattr(make_connection, 'Connection'): from .connection import Connection make_connection.Connection = Connection return make_connection.Connection()
... def quote(name): if isinstance(name, unicode): name = normalize(name) return urllib.quote(name) ...
eac383015161f661de33a94dae958a21761071dc
zeus/run.py
zeus/run.py
from app import app import config from rest_api.controller import rest_blueprint app.register_blueprint(rest_blueprint) if __name__ == "__main__": app.run()
from app import app import config from rest_api.controller import rest_blueprint app.register_blueprint(rest_blueprint, url_prefix="/api") if __name__ == "__main__": app.run()
Move all api routes into /api/
Move all api routes into /api/
Python
bsd-2-clause
nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs
python
## Code Before: from app import app import config from rest_api.controller import rest_blueprint app.register_blueprint(rest_blueprint) if __name__ == "__main__": app.run() ## Instruction: Move all api routes into /api/ ## Code After: from app import app import config from rest_api.controller import rest_blueprint app.register_blueprint(rest_blueprint, url_prefix="/api") if __name__ == "__main__": app.run()
# ... existing code ... import config from rest_api.controller import rest_blueprint app.register_blueprint(rest_blueprint, url_prefix="/api") if __name__ == "__main__": app.run() # ... rest of the code ...
edec252d9a050ead0084280f9772f05a2a3d7608
preferences/forms.py
preferences/forms.py
from registration.forms import RegistrationFormUniqueEmail class RegistrationUserForm(RegistrationFormUniqueEmail): class Meta: model = User fields = ("email")
from django import forms from registration.forms import RegistrationFormUniqueEmail from preferences.models import Preferences # from django.forms import ModelForm # class RegistrationUserForm(RegistrationFormUniqueEmail): # class Meta: # model = User # fields = ("email") class PreferencesForm(forms.ModelForm): class Meta: model = Preferences fields = ['representitive', 'senator', 'street_line1', 'street_line2', 'zipcode', 'city', 'state']
Add preferences form built off model
Add preferences form built off model
Python
mit
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
python
## Code Before: from registration.forms import RegistrationFormUniqueEmail class RegistrationUserForm(RegistrationFormUniqueEmail): class Meta: model = User fields = ("email") ## Instruction: Add preferences form built off model ## Code After: from django import forms from registration.forms import RegistrationFormUniqueEmail from preferences.models import Preferences # from django.forms import ModelForm # class RegistrationUserForm(RegistrationFormUniqueEmail): # class Meta: # model = User # fields = ("email") class PreferencesForm(forms.ModelForm): class Meta: model = Preferences fields = ['representitive', 'senator', 'street_line1', 'street_line2', 'zipcode', 'city', 'state']
// ... existing code ... from django import forms from registration.forms import RegistrationFormUniqueEmail from preferences.models import Preferences # from django.forms import ModelForm # class RegistrationUserForm(RegistrationFormUniqueEmail): # class Meta: # model = User # fields = ("email") class PreferencesForm(forms.ModelForm): class Meta: model = Preferences fields = ['representitive', 'senator', 'street_line1', 'street_line2', 'zipcode', 'city', 'state'] // ... rest of the code ...
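Editor's note: a `ModelForm` like the one above is normally driven from a view. A minimal sketch of that wiring, assuming a configured Django project; the view name, URL name and template path are illustrative assumptions, not part of the commit:

```python
# Sketch only: import path follows the example's preferences/forms.py module.
from django.shortcuts import redirect, render

from preferences.forms import PreferencesForm

def edit_preferences(request):
    form = PreferencesForm(request.POST or None)
    if request.method == "POST" and form.is_valid():
        form.save()                      # creates/updates a Preferences row
        return redirect("preferences")   # hypothetical URL name
    return render(request, "preferences/edit.html", {"form": form})
```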
84b4fc8fdc3808340293c076a1628bf0decd2d2c
setup.py
setup.py
from distutils.core import setup setup(name="minishift-python", version="0.1.2", description="Python interface for the minishift", author="Nick Johnson", author_email="[email protected]", url="https://github.com/arachnidlabs/minishift-python/", packages=["minishift"], requires=["mcp2210"])
from distutils.core import setup setup(name="minishift-python", version="0.1.3", description="Python interface for the minishift", author="Nick Johnson", author_email="[email protected]", url="https://github.com/arachnidlabs/minishift-python/", packages=["minishift"], install_requires=["mcp2210", "python-daemon"])
Add python-daemon as a dep
Add python-daemon as a dep
Python
bsd-3-clause
arachnidlabs/minishift-python
python
## Code Before: from distutils.core import setup setup(name="minishift-python", version="0.1.2", description="Python interface for the minishift", author="Nick Johnson", author_email="[email protected]", url="https://github.com/arachnidlabs/minishift-python/", packages=["minishift"], requires=["mcp2210"]) ## Instruction: Add python-daemon as a dep ## Code After: from distutils.core import setup setup(name="minishift-python", version="0.1.3", description="Python interface for the minishift", author="Nick Johnson", author_email="[email protected]", url="https://github.com/arachnidlabs/minishift-python/", packages=["minishift"], install_requires=["mcp2210", "python-daemon"])
# ... existing code ... from distutils.core import setup setup(name="minishift-python", version="0.1.3", description="Python interface for the minishift", author="Nick Johnson", author_email="[email protected]", url="https://github.com/arachnidlabs/minishift-python/", packages=["minishift"], install_requires=["mcp2210", "python-daemon"]) # ... rest of the code ...
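Editor's note: the switch from `requires` to `install_requires` matters because pip only resolves the setuptools keyword; the distutils-era `requires` field is metadata that does not pull anything in at install time. A hedged sketch of the setuptools form (package name and module are placeholders, not the minishift project):

```python
from setuptools import setup

setup(
    name="example-pkg",        # placeholder name
    version="0.1.0",
    py_modules=["example"],    # placeholder module
    install_requires=["mcp2210", "python-daemon"],  # resolved by pip at install time
)
```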
534633d078fe6f81e67ead075ac31faac0c3c60d
tests/__init__.py
tests/__init__.py
import pycurl def setup_package(): print('Testing %s' % pycurl.version)
def setup_package(): # import here, not globally, so that running # python -m tests.appmanager # to launch the app manager is possible without having pycurl installed # (as the test app does not depend on pycurl) import pycurl print('Testing %s' % pycurl.version)
Make it possible to run test app without pycurl being installed
Make it possible to run test app without pycurl being installed
Python
lgpl-2.1
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
python
## Code Before: import pycurl def setup_package(): print('Testing %s' % pycurl.version) ## Instruction: Make it possible to run test app without pycurl being installed ## Code After: def setup_package(): # import here, not globally, so that running # python -m tests.appmanager # to launch the app manager is possible without having pycurl installed # (as the test app does not depend on pycurl) import pycurl print('Testing %s' % pycurl.version)
// ... existing code ... def setup_package(): # import here, not globally, so that running # python -m tests.appmanager # to launch the app manager is possible without having pycurl installed # (as the test app does not depend on pycurl) import pycurl print('Testing %s' % pycurl.version) // ... rest of the code ...
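Editor's note: the deferred import above makes the optional dependency a requirement only of the code path that actually uses it. A slightly more defensive variant of the same pattern, shown as a sketch rather than as part of the pycurl test suite:

```python
def setup_package():
    try:
        import pycurl
    except ImportError:
        print("pycurl not installed; skipping version banner")
        return
    print("Testing %s" % pycurl.version)
```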
7698ec18abd25ed41b3104a382e7d8ca38d755ca
tests/unit/test_describe.py
tests/unit/test_describe.py
import pytest from mock import Mock from formica import cli from tests.unit.constants import STACK def test_describes_change_set(boto_client, change_set): cli.main(['describe', '--stack', STACK]) boto_client.assert_called_with('cloudformation') change_set.assert_called_with(stack=STACK) change_set.return_value.describe.assert_called_once()
import pytest from mock import Mock from formica import cli from tests.unit.constants import STACK def test_describes_change_set(boto_client, change_set): cli.main(['describe', '--stack', STACK]) change_set.assert_called_with(stack=STACK) change_set.return_value.describe.assert_called_once()
Remove Assert Call not necessary anymore
Remove Assert Call not necessary anymore
Python
mit
flomotlik/formica
python
## Code Before: import pytest from mock import Mock from formica import cli from tests.unit.constants import STACK def test_describes_change_set(boto_client, change_set): cli.main(['describe', '--stack', STACK]) boto_client.assert_called_with('cloudformation') change_set.assert_called_with(stack=STACK) change_set.return_value.describe.assert_called_once() ## Instruction: Remove Assert Call not necessary anymore ## Code After: import pytest from mock import Mock from formica import cli from tests.unit.constants import STACK def test_describes_change_set(boto_client, change_set): cli.main(['describe', '--stack', STACK]) change_set.assert_called_with(stack=STACK) change_set.return_value.describe.assert_called_once()
# ... existing code ... def test_describes_change_set(boto_client, change_set): cli.main(['describe', '--stack', STACK]) change_set.assert_called_with(stack=STACK) change_set.return_value.describe.assert_called_once() # ... rest of the code ...
2775c7f39c0e26b728fe6fb31168328ba4caeab2
opps/api/models.py
opps/api/models.py
import uuid import hmac from django.db import models from django.conf import settings from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL) key = models.CharField(u"Key", max_length=255) date_insert = models.DateTimeField(u"Date insert", auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, settings.AUTH_USER_MODEL)
import uuid import hmac from django.db import models from django.conf import settings from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL) key = models.CharField(u"Key", max_length=255) date_insert = models.DateTimeField(u"Date insert", auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, User)
Fix signal create api key on post save User
Fix signal create api key on post save User
Python
mit
williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,YACOWS/opps
python
## Code Before: import uuid import hmac from django.db import models from django.conf import settings from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL) key = models.CharField(u"Key", max_length=255) date_insert = models.DateTimeField(u"Date insert", auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, settings.AUTH_USER_MODEL) ## Instruction: Fix signal create api key on post save User ## Code After: import uuid import hmac from django.db import models from django.conf import settings from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL) key = models.CharField(u"Key", max_length=255) date_insert = models.DateTimeField(u"Date insert", auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, User)
... ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, User) ...
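Editor's note: connecting `post_save` with the resolved model class scopes the handler to saves of that model only, which is what the fix restores. An equivalent decorator-based sketch, written as if it lived in the same models module so the `User` and `ApiKey` names defined above are in scope:

```python
from django.db.models.signals import post_save
from django.dispatch import receiver

@receiver(post_save, sender=User)
def create_api_key(sender, instance, created, **kwargs):
    if created:                          # only on the initial save
        ApiKey.objects.create(user=instance)
```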
9828e5125cdbc01a773c60b1e211d0e434a2c5aa
tests/test_modules/test_pmac/test_pmacstatuspart.py
tests/test_modules/test_pmac/test_pmacstatuspart.py
from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, i10=1705244) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 4919.300698316487
from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 2500.04
Change TestPmacStatusPart to not use i10
Change TestPmacStatusPart to not use i10
Python
apache-2.0
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
python
## Code Before: from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, i10=1705244) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 4919.300698316487 ## Instruction: Change TestPmacStatusPart to not use i10 ## Code After: from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 2500.04
... child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) ... def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 2500.04 ...
934f2c1520185800de45d6114a5aaa1fdee3e0c2
aeron-tools/src/test/java/co/uk/real_logic/aeron/tools/PubSubOptionsTest.java
aeron-tools/src/test/java/co/uk/real_logic/aeron/tools/PubSubOptionsTest.java
package co.uk.real_logic.aeron.tools; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import uk.co.real_logic.aeron.tools.PubSubOptions; /** * Created by bhorst on 3/3/15. */ public class PubSubOptionsTest { PubSubOptions opts; @Before public void SetUp() { opts = new PubSubOptions(); } @Test public void threadsShorthandValid() throws Exception { String[] args = { "-t", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test public void threadsLonghandValid() throws Exception { String[] args = { "--threads", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test (expected=ParseException.class) public void threadsInvalid() { } @Test (expected=ParseException.class) public void threadsLonghandInvalid() throws Exception { String[] args = { "--threads", "asdf" }; opts.parseArgs(args); } }
package co.uk.real_logic.aeron.tools; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import uk.co.real_logic.aeron.tools.PubSubOptions; /** * Created by bhorst on 3/3/15. */ public class PubSubOptionsTest { PubSubOptions opts; @Before public void setUp() { opts = new PubSubOptions(); } @Test public void threadsShorthandValid() throws Exception { String[] args = { "-t", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test public void threadsLonghandValid() throws Exception { String[] args = { "--threads", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test (expected=ParseException.class) public void threadsInvalid() throws Exception { String[] args = { "-t", "asdf" }; opts.parseArgs(args); } @Test (expected=ParseException.class) public void threadsLonghandInvalid() throws Exception { String[] args = { "--threads", "asdf" }; opts.parseArgs(args); } }
Fix the syntax of unit tests
Fix the syntax of unit tests
Java
apache-2.0
galderz/Aeron,rlankenau/Aeron,strangelydim/Aeron,tbrooks8/Aeron,oleksiyp/Aeron,RussellWilby/Aeron,EvilMcJerkface/Aeron,gkamal/Aeron,RussellWilby/Aeron,tbrooks8/Aeron,RussellWilby/Aeron,rlankenau/Aeron,buybackoff/Aeron,real-logic/Aeron,buybackoff/Aeron,mikeb01/Aeron,strangelydim/Aeron,strangelydim/Aeron,tbrooks8/Aeron,oleksiyp/Aeron,lennartj/Aeron,mikeb01/Aeron,gkamal/Aeron,UIKit0/Aeron,buybackoff/Aeron,gkamal/Aeron,lennartj/Aeron,EvilMcJerkface/Aeron,galderz/Aeron,oleksiyp/Aeron,EvilMcJerkface/Aeron,jerrinot/Aeron,UIKit0/Aeron,real-logic/Aeron,lennartj/Aeron,galderz/Aeron,jerrinot/Aeron,UIKit0/Aeron,galderz/Aeron,real-logic/Aeron,mikeb01/Aeron,jerrinot/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron,rlankenau/Aeron,mikeb01/Aeron
java
## Code Before: package co.uk.real_logic.aeron.tools; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import uk.co.real_logic.aeron.tools.PubSubOptions; /** * Created by bhorst on 3/3/15. */ public class PubSubOptionsTest { PubSubOptions opts; @Before public void SetUp() { opts = new PubSubOptions(); } @Test public void threadsShorthandValid() throws Exception { String[] args = { "-t", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test public void threadsLonghandValid() throws Exception { String[] args = { "--threads", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test (expected=ParseException.class) public void threadsInvalid() { } @Test (expected=ParseException.class) public void threadsLonghandInvalid() throws Exception { String[] args = { "--threads", "asdf" }; opts.parseArgs(args); } } ## Instruction: Fix the syntax of unit tests ## Code After: package co.uk.real_logic.aeron.tools; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import uk.co.real_logic.aeron.tools.PubSubOptions; /** * Created by bhorst on 3/3/15. */ public class PubSubOptionsTest { PubSubOptions opts; @Before public void setUp() { opts = new PubSubOptions(); } @Test public void threadsShorthandValid() throws Exception { String[] args = { "-t", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test public void threadsLonghandValid() throws Exception { String[] args = { "--threads", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); } @Test (expected=ParseException.class) public void threadsInvalid() throws Exception { String[] args = { "-t", "asdf" }; opts.parseArgs(args); } @Test (expected=ParseException.class) public void threadsLonghandInvalid() throws Exception { String[] args = { "--threads", "asdf" }; opts.parseArgs(args); } }
... { PubSubOptions opts; @Before public void setUp() { opts = new PubSubOptions(); } @Test public void threadsShorthandValid() throws Exception { String[] args = { "-t", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); ... } @Test public void threadsLonghandValid() throws Exception { String[] args = { "--threads", "1234" }; opts.parseArgs(args); assertThat(opts.getThreads(), is(1234L)); ... } @Test (expected=ParseException.class) public void threadsInvalid() throws Exception { String[] args = { "-t", "asdf" }; opts.parseArgs(args); } @Test (expected=ParseException.class) public void threadsLonghandInvalid() throws Exception { String[] args = { "--threads", "asdf" }; opts.parseArgs(args); } ...
4a32bd6bdc91564276a4e46210fc9019dd1b8a89
statement_format.py
statement_format.py
import pandas as pd def fn(row): if row['Type'] == 'DIRECT DEBIT': return 'DD' if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME': return 'BP' if row['Amount (GBP)'] < 0: return 'SO' raise Exception('Unintended state') df = pd.read_csv('statement.csv') output = df[['Date']] output['Type'] = df.apply(fn, axis=1) output['Description'] = df['Reference'] output['Paid Out'] = df['Amount (GBP)'].copy() output['Paid In'] = df['Amount (GBP)'].copy() output['Paid Out'] = output['Paid Out'] * -1 output['Paid Out'][output['Paid Out'] < 0] = None output['Paid In'][output['Paid In'] < 0] = None output['Balance'] = df['Balance (GBP)'] print(output) output.to_csv('output.csv', index=False)
import json import pandas as pd def fn(row): if row['Type'] == 'DIRECT DEBIT': return 'DD' if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME': return 'BP' if row['Amount (GBP)'] < 0: return 'SO' raise Exception('Unintended state') df = pd.read_csv('statement.csv') conversions = json.load(open('description_conversion.json')) output = df[['Date']] output['Type'] = df.apply(fn, axis=1) output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions) output['Paid Out'] = df['Amount (GBP)'].copy() output['Paid In'] = df['Amount (GBP)'].copy() output['Paid Out'] = output['Paid Out'] * -1 output['Paid Out'][output['Paid Out'] < 0] = None output['Paid In'][output['Paid In'] < 0] = None output['Balance'] = df['Balance (GBP)'] output.to_csv('output.csv', index=False)
Correct operation. Now to fix panda warnings
Correct operation. Now to fix panda warnings
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
python
## Code Before: import pandas as pd def fn(row): if row['Type'] == 'DIRECT DEBIT': return 'DD' if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME': return 'BP' if row['Amount (GBP)'] < 0: return 'SO' raise Exception('Unintended state') df = pd.read_csv('statement.csv') output = df[['Date']] output['Type'] = df.apply(fn, axis=1) output['Description'] = df['Reference'] output['Paid Out'] = df['Amount (GBP)'].copy() output['Paid In'] = df['Amount (GBP)'].copy() output['Paid Out'] = output['Paid Out'] * -1 output['Paid Out'][output['Paid Out'] < 0] = None output['Paid In'][output['Paid In'] < 0] = None output['Balance'] = df['Balance (GBP)'] print(output) output.to_csv('output.csv', index=False) ## Instruction: Correct operation. Now to fix panda warnings ## Code After: import json import pandas as pd def fn(row): if row['Type'] == 'DIRECT DEBIT': return 'DD' if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME': return 'BP' if row['Amount (GBP)'] < 0: return 'SO' raise Exception('Unintended state') df = pd.read_csv('statement.csv') conversions = json.load(open('description_conversion.json')) output = df[['Date']] output['Type'] = df.apply(fn, axis=1) output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions) output['Paid Out'] = df['Amount (GBP)'].copy() output['Paid In'] = df['Amount (GBP)'].copy() output['Paid Out'] = output['Paid Out'] * -1 output['Paid Out'][output['Paid Out'] < 0] = None output['Paid In'][output['Paid In'] < 0] = None output['Balance'] = df['Balance (GBP)'] output.to_csv('output.csv', index=False)
// ... existing code ... import json import pandas as pd // ... modified code ... df = pd.read_csv('statement.csv') conversions = json.load(open('description_conversion.json')) output = df[['Date']] output['Type'] = df.apply(fn, axis=1) output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions) output['Paid Out'] = df['Amount (GBP)'].copy() output['Paid In'] = df['Amount (GBP)'].copy() output['Paid Out'] = output['Paid Out'] * -1 ... output['Paid In'][output['Paid In'] < 0] = None output['Balance'] = df['Balance (GBP)'] output.to_csv('output.csv', index=False) // ... rest of the code ...
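Editor's note: the description mapping relies on pandas' `Series.replace` accepting a dict of exact-match substitutions loaded from `description_conversion.json`. A small standalone illustration with made-up descriptions and mapping:

```python
import pandas as pd

descriptions = pd.Series(["TESCO STORES REF123", "ACME LTD SALARY"])
conversions = {"TESCO STORES REF123": "Groceries", "ACME LTD SALARY": "Salary"}
print(descriptions.replace(conversions))
# 0    Groceries
# 1       Salary
# dtype: object
```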
121929b8d719cac8e7d11cf825e588518e8b1a0c
src/main/java/com/github/cstroe/sqs/repository/RepositoryFactory.java
src/main/java/com/github/cstroe/sqs/repository/RepositoryFactory.java
package com.github.cstroe.sqs.repository; public class RepositoryFactory { public static NotebookRepository notebook() { return new NotebookRepository(); } public static NoteRepository note() { return new NoteRepository(); } }
package com.github.cstroe.sqs.repository; public class RepositoryFactory { }
Remove mentions to old repositories.
Remove mentions to old repositories.
Java
agpl-3.0
cstroe/SpendHawk,cstroe/SpendHawk,cstroe/SpendHawk
java
## Code Before: package com.github.cstroe.sqs.repository; public class RepositoryFactory { public static NotebookRepository notebook() { return new NotebookRepository(); } public static NoteRepository note() { return new NoteRepository(); } } ## Instruction: Remove mentions to old repositories. ## Code After: package com.github.cstroe.sqs.repository; public class RepositoryFactory { }
... package com.github.cstroe.sqs.repository; public class RepositoryFactory { } ...
60b039aabb94c1e5a50bb19bb7267a0fd3ceaa86
mollie/api/objects/list.py
mollie/api/objects/list.py
from .base import Base class List(Base): current = None def __init__(self, result, object_type): Base.__init__(self, result) self.object_type = object_type def __len__(self): """Return the count field.""" return int(self['count']) def get_object_name(self): return self.object_type.__name__.lower() + 's' def __iter__(self): """Implement iterator interface.""" self.current = None return self def __next__(self): """Implement iterator interface.""" if self.current is None: self.current = 0 else: self.current += 1 try: item = self['_embedded'][self.get_object_name()][self.current] return self.object_type(item) except IndexError: raise StopIteration next = __next__ # support python2 iterator interface @property def count(self): if 'count' not in self: return None return int(self['count']) def get_offset(self): if 'offset' not in self: return None return self['offset']
from .base import Base class List(Base): current = None def __init__(self, result, object_type): Base.__init__(self, result) self.object_type = object_type def __len__(self): """Return the count field.""" return int(self['count']) def get_object_name(self): return self.object_type.__name__.lower() + 's' def __iter__(self): """Implement iterator interface.""" self.current = None return self def __next__(self): """Implement iterator interface.""" if self.current is None: self.current = 0 else: self.current += 1 try: item = self['_embedded'][self.get_object_name()][self.current] return self.object_type(item) except IndexError: raise StopIteration next = __next__ # support python2 iterator interface @property def count(self): if 'count' not in self: return None return int(self['count'])
Drop obsoleted support for offset.
Drop obsoleted support for offset.
Python
bsd-2-clause
mollie/mollie-api-python
python
## Code Before: from .base import Base class List(Base): current = None def __init__(self, result, object_type): Base.__init__(self, result) self.object_type = object_type def __len__(self): """Return the count field.""" return int(self['count']) def get_object_name(self): return self.object_type.__name__.lower() + 's' def __iter__(self): """Implement iterator interface.""" self.current = None return self def __next__(self): """Implement iterator interface.""" if self.current is None: self.current = 0 else: self.current += 1 try: item = self['_embedded'][self.get_object_name()][self.current] return self.object_type(item) except IndexError: raise StopIteration next = __next__ # support python2 iterator interface @property def count(self): if 'count' not in self: return None return int(self['count']) def get_offset(self): if 'offset' not in self: return None return self['offset'] ## Instruction: Drop obsoleted support for offset. ## Code After: from .base import Base class List(Base): current = None def __init__(self, result, object_type): Base.__init__(self, result) self.object_type = object_type def __len__(self): """Return the count field.""" return int(self['count']) def get_object_name(self): return self.object_type.__name__.lower() + 's' def __iter__(self): """Implement iterator interface.""" self.current = None return self def __next__(self): """Implement iterator interface.""" if self.current is None: self.current = 0 else: self.current += 1 try: item = self['_embedded'][self.get_object_name()][self.current] return self.object_type(item) except IndexError: raise StopIteration next = __next__ # support python2 iterator interface @property def count(self): if 'count' not in self: return None return int(self['count'])
// ... existing code ... if 'count' not in self: return None return int(self['count']) // ... rest of the code ...
d6843bfe2e2f89e32acb57b83c1a4841ecc5c1c0
docs/src/main/kotlin/net/dean/jraw/docs/DocLinkGenerator.kt
docs/src/main/kotlin/net/dean/jraw/docs/DocLinkGenerator.kt
package net.dean.jraw.docs class DocLinkGenerator { fun generate(name: String): String { val clazz = ProjectTypeFinder.from(name) ?: throw IllegalArgumentException("No JRAW classes with (simple) name '$name'") return generate(clazz) } fun generate(clazz: Class<*>): String { return BASE + clazz.name.replace('.', '/') + ".html" } companion object { // TODO update when we actually update docs private const val BASE = "#" } }
package net.dean.jraw.docs import net.dean.jraw.Version class DocLinkGenerator { fun generate(name: String): String { val clazz = ProjectTypeFinder.from(name) ?: throw IllegalArgumentException("No JRAW classes with (simple) name '$name'") return generate(clazz) } fun generate(clazz: Class<*>): String { return BASE + clazz.name.replace('.', '/') + ".html" } companion object { private val BASE = "https://jitpack.io/com/github/mattbdean/JRAW/v${Version.get()}/javadoc/" } }
Add working Javadoc link to gitbook site
Add working Javadoc link to gitbook site
Kotlin
mit
thatJavaNerd/JRAW,thatJavaNerd/JRAW,Saketme/JRAW,Saketme/JRAW,Saketme/JRAW,thatJavaNerd/JRAW
kotlin
## Code Before: package net.dean.jraw.docs class DocLinkGenerator { fun generate(name: String): String { val clazz = ProjectTypeFinder.from(name) ?: throw IllegalArgumentException("No JRAW classes with (simple) name '$name'") return generate(clazz) } fun generate(clazz: Class<*>): String { return BASE + clazz.name.replace('.', '/') + ".html" } companion object { // TODO update when we actually update docs private const val BASE = "#" } } ## Instruction: Add working Javadoc link to gitbook site ## Code After: package net.dean.jraw.docs import net.dean.jraw.Version class DocLinkGenerator { fun generate(name: String): String { val clazz = ProjectTypeFinder.from(name) ?: throw IllegalArgumentException("No JRAW classes with (simple) name '$name'") return generate(clazz) } fun generate(clazz: Class<*>): String { return BASE + clazz.name.replace('.', '/') + ".html" } companion object { private val BASE = "https://jitpack.io/com/github/mattbdean/JRAW/v${Version.get()}/javadoc/" } }
// ... existing code ... package net.dean.jraw.docs import net.dean.jraw.Version class DocLinkGenerator { fun generate(name: String): String { // ... modified code ... } companion object { private val BASE = "https://jitpack.io/com/github/mattbdean/JRAW/v${Version.get()}/javadoc/" } } // ... rest of the code ...
33c518d34b7657549e5231aa5e5cd1a1206da1a5
setup.py
setup.py
import os from setuptools import setup def get_version_from_git_most_recent_tag(): return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v") def get_readme_content(): current_file_dir = os.path.dirname(__file__) readme_file_path = os.path.join(current_file_dir, "README.md") return open(readme_file_path).read() setup( name='telegram-bot', version=get_version_from_git_most_recent_tag(), description='Python Telegram bot API framework', long_description=get_readme_content(), url='https://github.com/alvarogzp/telegram-bot', author='Alvaro Gutierrez Perez', author_email='[email protected]', license='GPL-3.0', packages=['bot'], install_requires=[ 'requests', 'pytz' ], python_requires='>=3', )
import os from setuptools import setup, find_packages def get_version_from_git_most_recent_tag(): return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v") def get_readme_content(): current_file_dir = os.path.dirname(__file__) readme_file_path = os.path.join(current_file_dir, "README.md") return open(readme_file_path).read() setup( name='telegram-bot', version=get_version_from_git_most_recent_tag(), description='Python Telegram bot API framework', long_description=get_readme_content(), url='https://github.com/alvarogzp/telegram-bot', author='Alvaro Gutierrez Perez', author_email='[email protected]', license='GPL-3.0', packages=find_packages(), install_requires=[ 'requests', 'pytz' ], python_requires='>=3', )
Use find_packages() to export all packages automatically on install
Use find_packages() to export all packages automatically on install
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
python
## Code Before: import os from setuptools import setup def get_version_from_git_most_recent_tag(): return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v") def get_readme_content(): current_file_dir = os.path.dirname(__file__) readme_file_path = os.path.join(current_file_dir, "README.md") return open(readme_file_path).read() setup( name='telegram-bot', version=get_version_from_git_most_recent_tag(), description='Python Telegram bot API framework', long_description=get_readme_content(), url='https://github.com/alvarogzp/telegram-bot', author='Alvaro Gutierrez Perez', author_email='[email protected]', license='GPL-3.0', packages=['bot'], install_requires=[ 'requests', 'pytz' ], python_requires='>=3', ) ## Instruction: Use find_packages() to export all packages automatically on install ## Code After: import os from setuptools import setup, find_packages def get_version_from_git_most_recent_tag(): return os.popen("git tag -l v* | tail --lines=1").read().strip().lstrip("v") def get_readme_content(): current_file_dir = os.path.dirname(__file__) readme_file_path = os.path.join(current_file_dir, "README.md") return open(readme_file_path).read() setup( name='telegram-bot', version=get_version_from_git_most_recent_tag(), description='Python Telegram bot API framework', long_description=get_readme_content(), url='https://github.com/alvarogzp/telegram-bot', author='Alvaro Gutierrez Perez', author_email='[email protected]', license='GPL-3.0', packages=find_packages(), install_requires=[ 'requests', 'pytz' ], python_requires='>=3', )
// ... existing code ... import os from setuptools import setup, find_packages def get_version_from_git_most_recent_tag(): // ... modified code ... license='GPL-3.0', packages=find_packages(), install_requires=[ 'requests', // ... rest of the code ...
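Editor's note: `find_packages()` walks the project root for every directory containing an `__init__.py`, which is what makes sub-packages ship automatically. A quick way to preview what it would include, run from the project root; the exclude pattern is a common optional refinement, not part of the commit:

```python
from setuptools import find_packages

print(find_packages())                               # e.g. ['bot', 'bot.api', ...]
print(find_packages(exclude=("tests", "tests.*")))   # typical way to keep tests out
```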
5f0ef4eda80a6db44b091bb8693589ccba5544c6
src/lib/marble/geodata/data/GeoDataTimeStamp_p.h
src/lib/marble/geodata/data/GeoDataTimeStamp_p.h
// // This file is part of the Marble Virtual Globe. // // This program is free software licensed under the GNU LGPL. You can // find a copy of this license in LICENSE.txt in the top directory of // the source code. // // Copyright 2010 Harshit Jain <[email protected]> // #ifndef GEODATATIMESTAMPPRIVATE_H #define GEODATATIMESTAMPPRIVATE_H #include <QDateTime> #include "GeoDataTypes.h" #include <GeoDataTimeStamp.h> namespace Marble { class GeoDataTimeStampPrivate { public: QDateTime m_when; GeoDataTimeStamp::TimeResolution m_resolution; }; } // namespace Marble #endif //GEODATATIMESTAMPPRIVATE_H
// // This file is part of the Marble Virtual Globe. // // This program is free software licensed under the GNU LGPL. You can // find a copy of this license in LICENSE.txt in the top directory of // the source code. // // Copyright 2010 Harshit Jain <[email protected]> // #ifndef GEODATATIMESTAMPPRIVATE_H #define GEODATATIMESTAMPPRIVATE_H #include <QDateTime> #include "GeoDataTypes.h" #include <GeoDataTimeStamp.h> namespace Marble { class GeoDataTimeStampPrivate { public: QDateTime m_when; GeoDataTimeStamp::TimeResolution m_resolution; GeoDataTimeStampPrivate(); }; GeoDataTimeStampPrivate::GeoDataTimeStampPrivate() : m_resolution( GeoDataTimeStamp::SecondResolution ) { // nothing to do } } // namespace Marble #endif //GEODATATIMESTAMPPRIVATE_H
Add missing initialization of time resolution
Add missing initialization of time resolution
C
lgpl-2.1
quannt24/marble,probonopd/marble,probonopd/marble,probonopd/marble,David-Gil/marble-dev,tucnak/marble,probonopd/marble,David-Gil/marble-dev,tzapzoor/marble,quannt24/marble,tzapzoor/marble,quannt24/marble,probonopd/marble,tucnak/marble,David-Gil/marble-dev,tzapzoor/marble,tzapzoor/marble,tucnak/marble,quannt24/marble,tzapzoor/marble,quannt24/marble,tucnak/marble,tzapzoor/marble,quannt24/marble,David-Gil/marble-dev,tucnak/marble,quannt24/marble,tucnak/marble,probonopd/marble,tucnak/marble,probonopd/marble,David-Gil/marble-dev,tzapzoor/marble,tzapzoor/marble,David-Gil/marble-dev
c
## Code Before: // // This file is part of the Marble Virtual Globe. // // This program is free software licensed under the GNU LGPL. You can // find a copy of this license in LICENSE.txt in the top directory of // the source code. // // Copyright 2010 Harshit Jain <[email protected]> // #ifndef GEODATATIMESTAMPPRIVATE_H #define GEODATATIMESTAMPPRIVATE_H #include <QDateTime> #include "GeoDataTypes.h" #include <GeoDataTimeStamp.h> namespace Marble { class GeoDataTimeStampPrivate { public: QDateTime m_when; GeoDataTimeStamp::TimeResolution m_resolution; }; } // namespace Marble #endif //GEODATATIMESTAMPPRIVATE_H ## Instruction: Add missing initialization of time resolution ## Code After: // // This file is part of the Marble Virtual Globe. // // This program is free software licensed under the GNU LGPL. You can // find a copy of this license in LICENSE.txt in the top directory of // the source code. // // Copyright 2010 Harshit Jain <[email protected]> // #ifndef GEODATATIMESTAMPPRIVATE_H #define GEODATATIMESTAMPPRIVATE_H #include <QDateTime> #include "GeoDataTypes.h" #include <GeoDataTimeStamp.h> namespace Marble { class GeoDataTimeStampPrivate { public: QDateTime m_when; GeoDataTimeStamp::TimeResolution m_resolution; GeoDataTimeStampPrivate(); }; GeoDataTimeStampPrivate::GeoDataTimeStampPrivate() : m_resolution( GeoDataTimeStamp::SecondResolution ) { // nothing to do } } // namespace Marble #endif //GEODATATIMESTAMPPRIVATE_H
# ... existing code ... QDateTime m_when; GeoDataTimeStamp::TimeResolution m_resolution; GeoDataTimeStampPrivate(); }; GeoDataTimeStampPrivate::GeoDataTimeStampPrivate() : m_resolution( GeoDataTimeStamp::SecondResolution ) { // nothing to do } } // namespace Marble # ... rest of the code ...
9a5dc452f181fac45a8c0efcd3f70e116efefc2a
erpnext/patches/v7_0/setup_account_table_for_expense_claim_type_if_exists.py
erpnext/patches/v7_0/setup_account_table_for_expense_claim_type_if_exists.py
from __future__ import unicode_literals import frappe def execute(): frappe.reload_doc("hr", "doctype", "expense_claim_type") for expense_claim_type in frappe.get_all("Expense Claim Type", fields=["name", "default_account"]): if expense_claim_type.default_account: doc = frappe.get_doc("Expense Claim Type", expense_claim_type.name) doc.append("accounts", { "company": frappe.db.get_value("Account", expense_claim_type.default_account, "company"), "default_account": expense_claim_type.default_account, }) doc.save(ignore_permissions=True)
from __future__ import unicode_literals import frappe def execute(): frappe.reload_doc("hr", "doctype", "expense_claim_type") frappe.reload_doc("hr", "doctype", "expense_claim_account") for expense_claim_type in frappe.get_all("Expense Claim Type", fields=["name", "default_account"]): if expense_claim_type.default_account: doc = frappe.get_doc("Expense Claim Type", expense_claim_type.name) doc.append("accounts", { "company": frappe.db.get_value("Account", expense_claim_type.default_account, "company"), "default_account": expense_claim_type.default_account, }) doc.save(ignore_permissions=True)
Patch fixed for expense claim type
Patch fixed for expense claim type
Python
agpl-3.0
njmube/erpnext,indictranstech/erpnext,geekroot/erpnext,geekroot/erpnext,gsnbng/erpnext,Aptitudetech/ERPNext,indictranstech/erpnext,njmube/erpnext,indictranstech/erpnext,njmube/erpnext,geekroot/erpnext,geekroot/erpnext,indictranstech/erpnext,njmube/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
python
## Code Before: from __future__ import unicode_literals import frappe def execute(): frappe.reload_doc("hr", "doctype", "expense_claim_type") for expense_claim_type in frappe.get_all("Expense Claim Type", fields=["name", "default_account"]): if expense_claim_type.default_account: doc = frappe.get_doc("Expense Claim Type", expense_claim_type.name) doc.append("accounts", { "company": frappe.db.get_value("Account", expense_claim_type.default_account, "company"), "default_account": expense_claim_type.default_account, }) doc.save(ignore_permissions=True) ## Instruction: Patch fixed for expense claim type ## Code After: from __future__ import unicode_literals import frappe def execute(): frappe.reload_doc("hr", "doctype", "expense_claim_type") frappe.reload_doc("hr", "doctype", "expense_claim_account") for expense_claim_type in frappe.get_all("Expense Claim Type", fields=["name", "default_account"]): if expense_claim_type.default_account: doc = frappe.get_doc("Expense Claim Type", expense_claim_type.name) doc.append("accounts", { "company": frappe.db.get_value("Account", expense_claim_type.default_account, "company"), "default_account": expense_claim_type.default_account, }) doc.save(ignore_permissions=True)
// ... existing code ... def execute(): frappe.reload_doc("hr", "doctype", "expense_claim_type") frappe.reload_doc("hr", "doctype", "expense_claim_account") for expense_claim_type in frappe.get_all("Expense Claim Type", fields=["name", "default_account"]): if expense_claim_type.default_account: // ... rest of the code ...
56396f980236f6d909f63d7faaddd357f5fe235b
stock_quant_merge/models/stock.py
stock_quant_merge/models/stock.py
from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
Use browse record instead of ids
[MOD] Use browse record instead of ids
Python
agpl-3.0
InakiZabala/odoomrp-wip,Eficent/odoomrp-wip,diagramsoftware/odoomrp-wip,jobiols/odoomrp-wip,Antiun/odoomrp-wip,factorlibre/odoomrp-wip,raycarnes/odoomrp-wip,Daniel-CA/odoomrp-wip-public,esthermm/odoomrp-wip,odoomrp/odoomrp-wip,Daniel-CA/odoomrp-wip-public,odoomrp/odoomrp-wip,oihane/odoomrp-wip,jobiols/odoomrp-wip,odoocn/odoomrp-wip,michaeljohn32/odoomrp-wip,alhashash/odoomrp-wip,esthermm/odoomrp-wip,agaldona/odoomrp-wip-1,Endika/odoomrp-wip,ddico/odoomrp-wip,oihane/odoomrp-wip,jorsea/odoomrp-wip,Eficent/odoomrp-wip,maljac/odoomrp-wip,xpansa/odoomrp-wip,diagramsoftware/odoomrp-wip,sergiocorato/odoomrp-wip,sergiocorato/odoomrp-wip,windedge/odoomrp-wip,agaldona/odoomrp-wip-1,alfredoavanzosc/odoomrp-wip-1,dvitme/odoomrp-wip,slevenhagen/odoomrp-wip-npg,factorlibre/odoomrp-wip,invitu/odoomrp-wip
python
## Code Before: from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants() ## Instruction: [MOD] Use browse record instead of ids ## Code After: from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
# ... existing code ... @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), # ... modified code ... self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model # ... rest of the code ...
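Editor's note: the patch swaps an id list for recordset bookkeeping; Odoo recordsets behave like ordered collections of browse records, so membership tests (`quant in pending_quants`) and removal (`pending_quants -= quant`) work directly on records without tracking ids by hand. A pure-Python stand-in for that pattern, illustrative only since real recordsets come from the Odoo ORM:

```python
pending = {"quant_a", "quant_b", "quant_c"}   # stand-ins for browse records
merged_away = {"quant_c"}                     # records absorbed into another quant
for record in list(pending):
    if record in pending and record in merged_away:
        pending -= {record}                   # drop the record itself, no id juggling
print(sorted(pending))                        # ['quant_a', 'quant_b']
```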
b830f2d3d8d30623852010b0b2d4084fcf338742
xstream/src/java/com/thoughtworks/xstream/core/util/FastStack.java
xstream/src/java/com/thoughtworks/xstream/core/util/FastStack.java
package com.thoughtworks.xstream.core.util; public final class FastStack { private Object[] stack; private int pointer; public FastStack(int initialCapacity) { stack = new Object[initialCapacity]; } public Object push(Object value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; return value; } public void popSilently() { pointer--; } public Object pop() { return stack[--pointer]; } public Object peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public boolean hasStuff() { return pointer > 0; } public Object get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Object[] newStack = new Object[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } public String toString() { StringBuffer result = new StringBuffer("["); for (int i = 0; i < pointer; i++) { if (i > 0) { result.append(", "); } result.append(stack[i]); } result.append(']'); return result.toString(); } }
package com.thoughtworks.xstream.core.util; public final class FastStack { private Object[] stack; private int pointer; public FastStack(int initialCapacity) { stack = new Object[initialCapacity]; } public Object push(Object value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; return value; } public void popSilently() { stack[--pointer] = null; } public Object pop() { final Object result = stack[--pointer]; stack[pointer] = null; return result; } public Object peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public boolean hasStuff() { return pointer > 0; } public Object get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Object[] newStack = new Object[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } public String toString() { StringBuffer result = new StringBuffer("["); for (int i = 0; i < pointer; i++) { if (i > 0) { result.append(", "); } result.append(stack[i]); } result.append(']'); return result.toString(); } }
Drop unnecessary reference to popped elements to allow finalization through GC (XSTR-264).
Drop unnecessary reference to popped elements to allow finalization through GC (XSTR-264). git-svn-id: fe6d842192ccfb78748eb71580d1ce65f168b559@649 9830eeb5-ddf4-0310-9ef7-f4b9a3e3227e
Java
bsd-3-clause
svn2github/xstream,svn2github/xstream
java
## Code Before: package com.thoughtworks.xstream.core.util; public final class FastStack { private Object[] stack; private int pointer; public FastStack(int initialCapacity) { stack = new Object[initialCapacity]; } public Object push(Object value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; return value; } public void popSilently() { pointer--; } public Object pop() { return stack[--pointer]; } public Object peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public boolean hasStuff() { return pointer > 0; } public Object get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Object[] newStack = new Object[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } public String toString() { StringBuffer result = new StringBuffer("["); for (int i = 0; i < pointer; i++) { if (i > 0) { result.append(", "); } result.append(stack[i]); } result.append(']'); return result.toString(); } } ## Instruction: Drop unnecessary reference to popped elements to allow finalization through GC (XSTR-264). git-svn-id: fe6d842192ccfb78748eb71580d1ce65f168b559@649 9830eeb5-ddf4-0310-9ef7-f4b9a3e3227e ## Code After: package com.thoughtworks.xstream.core.util; public final class FastStack { private Object[] stack; private int pointer; public FastStack(int initialCapacity) { stack = new Object[initialCapacity]; } public Object push(Object value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; return value; } public void popSilently() { stack[--pointer] = null; } public Object pop() { final Object result = stack[--pointer]; stack[pointer] = null; return result; } public Object peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public boolean hasStuff() { return pointer > 0; } public Object get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Object[] newStack = new Object[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } public String toString() { StringBuffer result = new StringBuffer("["); for (int i = 0; i < pointer; i++) { if (i > 0) { result.append(", "); } result.append(stack[i]); } result.append(']'); return result.toString(); } }
... } public void popSilently() { stack[--pointer] = null; } public Object pop() { final Object result = stack[--pointer]; stack[pointer] = null; return result; } public Object peek() { ...
f819adcbf7fac24094690db9f46c44727b4c1f81
program/driver/us100.c
program/driver/us100.c
/*==============================================================================================*/ /*==============================================================================================*/ #include "QuadCopterConfig.h" Ultrasonic_t Ultrasonic = { .lenHigh = 0, .lenLow = 0, .d = 0 }; /*==============================================================================================*/ /*==============================================================================================* **函數 : us100_distant **功能 : get 1 calculated distant data from the data received by USART **輸入 : Ultrasonic.lenHigh, Ultrasonic.lenLow **輸出 : Ultrasonic.d (mm) **使用 : us100_distant(); **==============================================================================================*/ /*==============================================================================================*/ void us100_distant(){ //reading data serial.putc('U'); serial2.putc('1'); //vTaskDelay(500); //calculating the distance //if(serial2.getc()){ Ultrasonic.lenHigh = serial.getc(); serial2.putc('2'); Ultrasonic.lenLow = serial.getc(); serial2.putc('3'); Ultrasonic.d = Ultrasonic.lenHigh*256 + Ultrasonic.lenLow; //} }
/*==============================================================================================*/ /*==============================================================================================*/ #include "QuadCopterConfig.h" /* Connection methods of Ultrasonic */ #define ULT_USE_UART2 1 #define ULT_USE_PWM 0 Ultrasonic_t Ultrasonic = { .lenHigh = 0, .lenLow = 0, .d = 0 }; /*==============================================================================================*/ /*==============================================================================================* **函數 : us100_distant **功能 : get 1 calculated distant data from the data received by USART **輸入 : Ultrasonic.lenHigh, Ultrasonic.lenLow **輸出 : Ultrasonic.d (mm) **使用 : print_us100_distant(); **==============================================================================================*/ /*==============================================================================================*/ void print_us100_distant(){ #if ULT_USE_UART2 serial2.putc('U'); vTaskDelay(500); Ultrasonic.lenHigh = serial2.getc(); Ultrasonic.lenLow = serial2.getc(); Ultrasonic.d = (Ultrasonic.lenHigh*256 + Ultrasonic.lenLow)*0.1; serial.printf("Distance: "); serial.printf("%d",Ultrasonic.d); serial.printf(" cm\n\r"); vTaskDelay(30); #endif }
Make US100 sensor get distance successfully.
Make US100 sensor get distance successfully.
C
mit
zxc2694/STM32F429_Quadrotor,zxc2694/STM32F429_Quadrotor,zxc2694/STM32F429_Quadrotor
c
## Code Before: /*==============================================================================================*/ /*==============================================================================================*/ #include "QuadCopterConfig.h" Ultrasonic_t Ultrasonic = { .lenHigh = 0, .lenLow = 0, .d = 0 }; /*==============================================================================================*/ /*==============================================================================================* **函數 : us100_distant **功能 : get 1 calculated distant data from the data received by USART **輸入 : Ultrasonic.lenHigh, Ultrasonic.lenLow **輸出 : Ultrasonic.d (mm) **使用 : us100_distant(); **==============================================================================================*/ /*==============================================================================================*/ void us100_distant(){ //reading data serial.putc('U'); serial2.putc('1'); //vTaskDelay(500); //calculating the distance //if(serial2.getc()){ Ultrasonic.lenHigh = serial.getc(); serial2.putc('2'); Ultrasonic.lenLow = serial.getc(); serial2.putc('3'); Ultrasonic.d = Ultrasonic.lenHigh*256 + Ultrasonic.lenLow; //} } ## Instruction: Make US100 sensor get distance successfully. ## Code After: /*==============================================================================================*/ /*==============================================================================================*/ #include "QuadCopterConfig.h" /* Connection methods of Ultrasonic */ #define ULT_USE_UART2 1 #define ULT_USE_PWM 0 Ultrasonic_t Ultrasonic = { .lenHigh = 0, .lenLow = 0, .d = 0 }; /*==============================================================================================*/ /*==============================================================================================* **函數 : us100_distant **功能 : get 1 calculated distant data from the data received by USART **輸入 : Ultrasonic.lenHigh, Ultrasonic.lenLow **輸出 : Ultrasonic.d (mm) **使用 : print_us100_distant(); **==============================================================================================*/ /*==============================================================================================*/ void print_us100_distant(){ #if ULT_USE_UART2 serial2.putc('U'); vTaskDelay(500); Ultrasonic.lenHigh = serial2.getc(); Ultrasonic.lenLow = serial2.getc(); Ultrasonic.d = (Ultrasonic.lenHigh*256 + Ultrasonic.lenLow)*0.1; serial.printf("Distance: "); serial.printf("%d",Ultrasonic.d); serial.printf(" cm\n\r"); vTaskDelay(30); #endif }
... /*==============================================================================================*/ /*==============================================================================================*/ #include "QuadCopterConfig.h" /* Connection methods of Ultrasonic */ #define ULT_USE_UART2 1 #define ULT_USE_PWM 0 Ultrasonic_t Ultrasonic = { .lenHigh = 0, ... /*==============================================================================================*/ /*==============================================================================================* **函數 : us100_distant **功能 : get 1 calculated distant data from the data received by USART **輸入 : Ultrasonic.lenHigh, Ultrasonic.lenLow **輸出 : Ultrasonic.d (mm) **使用 : print_us100_distant(); **==============================================================================================*/ /*==============================================================================================*/ void print_us100_distant(){ #if ULT_USE_UART2 serial2.putc('U'); vTaskDelay(500); Ultrasonic.lenHigh = serial2.getc(); Ultrasonic.lenLow = serial2.getc(); Ultrasonic.d = (Ultrasonic.lenHigh*256 + Ultrasonic.lenLow)*0.1; serial.printf("Distance: "); serial.printf("%d",Ultrasonic.d); serial.printf(" cm\n\r"); vTaskDelay(30); #endif } ...
ad934e49a43a8340af9d52bbac86bede45d0e84d
aero/adapters/brew.py
aero/adapters/brew.py
__author__ = 'nickl-'

from aero.__version__ import __version__

from .base import BaseAdapter


class Brew(BaseAdapter):
    """
    Homebrew adapter.
    """

    def search(self, query):
        response = self.command(['search', query])[0]
        if 'No formula found' not in response and 'Error:' not in response:
            return dict([(
                self.package_name(line),
                '\n'.join(map(
                    lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1],
                    self.search_info(line)
                ))
            ) for line in response.splitlines() if line])
        return {}

    def search_info(self, query):
        info = self.info(query)
        return filter(
            None,
            [
                info[0],
                info[1] if len(info) > 1 else None,
                info[2] if len(info) > 2 else None,
            ]
        )

    def info(self, query):
        if '/' in query:
            self.command(['tap', '/'.join(query.split('/')[:-1])])
        response = self.command(['info', query])[0]
        if 'Error:' not in response:
            response = response.replace(query + ': ', 'version: ')
            return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
        return [['No info available']]

    def install(self, query):
        self.shell(['install', query])
        return {}
__author__ = 'nickl-'

from aero.__version__ import __version__

from .base import BaseAdapter


class Brew(BaseAdapter):
    """
    Homebrew adapter.
    """

    def search(self, query):
        response = self.command(['search', query])[0]
        if 'No formula found' not in response and 'Error:' not in response:
            return dict([(
                self.package_name(line),
                self.search_info(self.package_name(line))
            ) for line in response.splitlines() if line])
        return {}

    def search_info(self, query):
        response = self._execute_command('aero', ['info', query], False)[0]

        from re import split
        lines = response.splitlines()
        idx = lines.index(' ________________________________________ __________________________________________________ ')
        return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]])

    def info(self, query):
        if '/' in query:
            self.command(['tap', '/'.join(query.split('/')[:-1])])
        response = self.command(['info', query])[0]
        if 'Error:' not in response:
            response = response.replace(query + ': ', 'version: ')
            return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
        return [['No info available']]

    def install(self, query):
        self.shell(['install', query])
        return {}
Use aero info instead for caching info
Use aero info instead for caching info Brew requires brew info for additional information. If we instead call aero info we can at least cache the info calls for later.
Python
bsd-3-clause
Aeronautics/aero
python
## Code Before: __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.search_info(line) )) ) for line in response.splitlines() if line]) return {} def search_info(self, query): info = self.info(query) return filter( None, [ info[0], info[1] if len(info) > 1 else None, info[2] if len(info) > 2 else None, ] ) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {} ## Instruction: Use aero info instead for caching info Brew requires brew info for additional information. If we instead call aero info we can at least cache the info calls for later. ## Code After: __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), self.search_info(self.package_name(line)) ) for line in response.splitlines() if line]) return {} def search_info(self, query): response = self._execute_command('aero', ['info', query], False)[0] from re import split lines = response.splitlines() idx = lines.index(' ________________________________________ __________________________________________________ ') return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
# ... existing code ... if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), self.search_info(self.package_name(line)) ) for line in response.splitlines() if line]) return {} def search_info(self, query): response = self._execute_command('aero', ['info', query], False)[0] from re import split lines = response.splitlines() idx = lines.index(' ________________________________________ __________________________________________________ ') return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) def info(self, query): if '/' in query: # ... rest of the code ...
ff476b33c26a9067e6ac64b2c161d29b0febea33
py/capnptools/examples/tests/test_books.py
py/capnptools/examples/tests/test_books.py
import unittest

from examples import books


class BooksTest(unittest.TestCase):

    def test_builder(self):
        book = books.MallocMessageBuilder().init_root(books.Book)
        book.title = 'Moby-Dick; or, The Whale'
        book.authors = ['Herman Melville']
        self.assertEqual(
            {
                'title': 'Moby-Dick; or, The Whale',
                'authors': ['Herman Melville'],
            },
            book._as_dict(),
        )
        book = book._as_reader()
        self.assertEqual('Moby-Dick; or, The Whale', book.title)
        self.assertEqual(['Herman Melville'], book.authors._as_dict())
        self.assertEqual(
            {
                'title': 'Moby-Dick; or, The Whale',
                'authors': ['Herman Melville'],
            },
            book._as_dict(),
        )


if __name__ == '__main__':
    unittest.main()
import unittest

import os
import tempfile

from examples import books


class BooksTest(unittest.TestCase):

    BOOK = {
        'title': 'Moby-Dick; or, The Whale',
        'authors': ['Herman Melville'],
    }

    def test_builder(self):
        book = books.MallocMessageBuilder().init_root(books.Book)
        book.title = self.BOOK['title']
        book.authors = self.BOOK['authors']
        self.assertEqual(self.BOOK, book._as_dict())
        book = book._as_reader()
        self.assertEqual(self.BOOK['title'], book.title)
        self.assertEqual(self.BOOK['authors'], book.authors._as_dict())
        self.assertEqual(self.BOOK, book._as_dict())

    def test_write(self):
        builder = books.MallocMessageBuilder()
        book = builder.init_root(books.Book)
        book.title = self.BOOK['title']
        book.authors = self.BOOK['authors']
        for read_cls, write_func in [
                ('StreamFdMessageReader', 'write_to'),
                ('PackedFdMessageReader', 'write_packed_to')]:
            with self.subTest(read_cls=read_cls, write_func=write_func):
                fd, path = tempfile.mkstemp()
                try:
                    getattr(builder, write_func)(fd)
                    os.close(fd)
                    fd = os.open(path, os.O_RDONLY)
                    reader = getattr(books, read_cls)(fd)
                    book = reader.get_root(books.Book)
                    self.assertEqual(self.BOOK, book._as_dict())
                finally:
                    os.unlink(path)
                    os.close(fd)


if __name__ == '__main__':
    unittest.main()
Add unit tests for write_to and write_packed_to
Add unit tests for write_to and write_packed_to
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
python
## Code Before: import unittest from examples import books class BooksTest(unittest.TestCase): def test_builder(self): book = books.MallocMessageBuilder().init_root(books.Book) book.title = 'Moby-Dick; or, The Whale' book.authors = ['Herman Melville'] self.assertEqual( { 'title': 'Moby-Dick; or, The Whale', 'authors': ['Herman Melville'], }, book._as_dict(), ) book = book._as_reader() self.assertEqual('Moby-Dick; or, The Whale', book.title) self.assertEqual(['Herman Melville'], book.authors._as_dict()) self.assertEqual( { 'title': 'Moby-Dick; or, The Whale', 'authors': ['Herman Melville'], }, book._as_dict(), ) if __name__ == '__main__': unittest.main() ## Instruction: Add unit tests for write_to and write_packed_to ## Code After: import unittest import os import tempfile from examples import books class BooksTest(unittest.TestCase): BOOK = { 'title': 'Moby-Dick; or, The Whale', 'authors': ['Herman Melville'], } def test_builder(self): book = books.MallocMessageBuilder().init_root(books.Book) book.title = self.BOOK['title'] book.authors = self.BOOK['authors'] self.assertEqual(self.BOOK, book._as_dict()) book = book._as_reader() self.assertEqual(self.BOOK['title'], book.title) self.assertEqual(self.BOOK['authors'], book.authors._as_dict()) self.assertEqual(self.BOOK, book._as_dict()) def test_write(self): builder = books.MallocMessageBuilder() book = builder.init_root(books.Book) book.title = self.BOOK['title'] book.authors = self.BOOK['authors'] for read_cls, write_func in [ ('StreamFdMessageReader', 'write_to'), ('PackedFdMessageReader', 'write_packed_to')]: with self.subTest(read_cls=read_cls, write_func=write_func): fd, path = tempfile.mkstemp() try: getattr(builder, write_func)(fd) os.close(fd) fd = os.open(path, os.O_RDONLY) reader = getattr(books, read_cls)(fd) book = reader.get_root(books.Book) self.assertEqual(self.BOOK, book._as_dict()) finally: os.unlink(path) os.close(fd) if __name__ == '__main__': unittest.main()
// ... existing code ... import unittest import os import tempfile from examples import books // ... modified code ... class BooksTest(unittest.TestCase): BOOK = { 'title': 'Moby-Dick; or, The Whale', 'authors': ['Herman Melville'], } def test_builder(self): book = books.MallocMessageBuilder().init_root(books.Book) book.title = self.BOOK['title'] book.authors = self.BOOK['authors'] self.assertEqual(self.BOOK, book._as_dict()) book = book._as_reader() self.assertEqual(self.BOOK['title'], book.title) self.assertEqual(self.BOOK['authors'], book.authors._as_dict()) self.assertEqual(self.BOOK, book._as_dict()) def test_write(self): builder = books.MallocMessageBuilder() book = builder.init_root(books.Book) book.title = self.BOOK['title'] book.authors = self.BOOK['authors'] for read_cls, write_func in [ ('StreamFdMessageReader', 'write_to'), ('PackedFdMessageReader', 'write_packed_to')]: with self.subTest(read_cls=read_cls, write_func=write_func): fd, path = tempfile.mkstemp() try: getattr(builder, write_func)(fd) os.close(fd) fd = os.open(path, os.O_RDONLY) reader = getattr(books, read_cls)(fd) book = reader.get_root(books.Book) self.assertEqual(self.BOOK, book._as_dict()) finally: os.unlink(path) os.close(fd) if __name__ == '__main__': // ... rest of the code ...
bb17b5e7ef14be5e038e938011d7e50981d0e049
iv/lv5/radio/block_size.h
iv/lv5/radio/block_size.h
namespace iv {
namespace lv5 {
namespace radio {

class Block;

static const std::size_t kBlockSize = core::Size::KB * 4;

static const uintptr_t kBlockMask = ~static_cast<uintptr_t>(kBlockSize - 1);

// must be 2^n size
IV_STATIC_ASSERT((1 << core::math::detail::CTZ<kBlockSize>::value) == kBlockSize);

} } }  // namespace iv::lv5::radio
#endif  // IV_LV5_RADIO_BLOCK_SIZE_H_
namespace iv {
namespace lv5 {
namespace radio {
namespace detail_block_size {

template<std::size_t x>
struct Is2Power {
  static const bool value = x > 1 && (x & (x - 1)) == 0;
};

}  // namespace detail_block_size

class Block;

static const std::size_t kBlockSize = core::Size::KB * 4;

static const uintptr_t kBlockMask = ~static_cast<uintptr_t>(kBlockSize - 1);

// must be 2^n size
IV_STATIC_ASSERT(detail_block_size::Is2Power<kBlockSize>::value);

} } }  // namespace iv::lv5::radio
#endif  // IV_LV5_RADIO_BLOCK_SIZE_H_
Use Is2Power instead of ctz
Use Is2Power instead of ctz
C
bsd-2-clause
Constellation/iv,Constellation/iv,Constellation/iv,Constellation/iv
c
## Code Before: namespace iv { namespace lv5 { namespace radio { class Block; static const std::size_t kBlockSize = core::Size::KB * 4; static const uintptr_t kBlockMask = ~static_cast<uintptr_t>(kBlockSize - 1); // must be 2^n size IV_STATIC_ASSERT((1 << core::math::detail::CTZ<kBlockSize>::value) == kBlockSize); } } } // namespace iv::lv5::radio #endif // IV_LV5_RADIO_BLOCK_SIZE_H_ ## Instruction: Use Is2Power instead of ctz ## Code After: namespace iv { namespace lv5 { namespace radio { namespace detail_block_size { template<std::size_t x> struct Is2Power { static const bool value = x > 1 && (x & (x - 1)) == 0; }; } // namespace detail_block_size class Block; static const std::size_t kBlockSize = core::Size::KB * 4; static const uintptr_t kBlockMask = ~static_cast<uintptr_t>(kBlockSize - 1); // must be 2^n size IV_STATIC_ASSERT(detail_block_size::Is2Power<kBlockSize>::value); } } } // namespace iv::lv5::radio #endif // IV_LV5_RADIO_BLOCK_SIZE_H_
... namespace iv { namespace lv5 { namespace radio { namespace detail_block_size { template<std::size_t x> struct Is2Power { static const bool value = x > 1 && (x & (x - 1)) == 0; }; } // namespace detail_block_size class Block; ... static const uintptr_t kBlockMask = ~static_cast<uintptr_t>(kBlockSize - 1); // must be 2^n size IV_STATIC_ASSERT(detail_block_size::Is2Power<kBlockSize>::value); } } } // namespace iv::lv5::radio #endif // IV_LV5_RADIO_BLOCK_SIZE_H_ ...
2c95054842db106883a400e5d040aafc31b123dd
comics/meta/base.py
comics/meta/base.py
import datetime as dt

from comics.core.models import Comic


class MetaBase(object):
    # Required values
    name = None
    language = None
    url = None

    # Default values
    start_date = None
    end_date = None
    rights = ''

    @property
    def slug(self):
        return self.__module__.split('.')[-1]

    def create_comic(self):
        if Comic.objects.filter(slug=self.slug).count():
            comic = Comic.objects.get(slug=self.slug)
            comic.name = self.name
            comic.language = self.language
            comic.url = self.url
        else:
            comic = Comic(
                name=self.name,
                slug=self.slug,
                language=self.language,
                url=self.url)
        comic.start_date = self._get_date(self.start_date)
        comic.end_date = self._get_date(self.end_date)
        comic.rights = self.rights
        comic.save()

    def _get_date(self, date):
        if date is None:
            return None
        return dt.datetime.strptime(date, '%Y-%m-%d').date()
import datetime as dt

from comics.core.models import Comic


class MetaBase(object):
    # Required values
    name = None
    language = None
    url = None

    # Default values
    active = True
    start_date = None
    end_date = None
    rights = ''

    @property
    def slug(self):
        return self.__module__.split('.')[-1]

    def create_comic(self):
        if Comic.objects.filter(slug=self.slug).count():
            comic = Comic.objects.get(slug=self.slug)
            comic.name = self.name
            comic.language = self.language
            comic.url = self.url
        else:
            comic = Comic(
                name=self.name,
                slug=self.slug,
                language=self.language,
                url=self.url)
        comic.active = self.active
        comic.start_date = self._get_date(self.start_date)
        comic.end_date = self._get_date(self.end_date)
        comic.rights = self.rights
        comic.save()

    def _get_date(self, date):
        if date is None:
            return None
        return dt.datetime.strptime(date, '%Y-%m-%d').date()
Add new boolean field MetaBase.active
Add new boolean field MetaBase.active
Python
agpl-3.0
jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,datagutten/comics,klette/comics,klette/comics,jodal/comics,datagutten/comics
python
## Code Before: import datetime as dt from comics.core.models import Comic class MetaBase(object): # Required values name = None language = None url = None # Default values start_date = None end_date = None rights = '' @property def slug(self): return self.__module__.split('.')[-1] def create_comic(self): if Comic.objects.filter(slug=self.slug).count(): comic = Comic.objects.get(slug=self.slug) comic.name = self.name comic.language = self.language comic.url = self.url else: comic = Comic( name=self.name, slug=self.slug, language=self.language, url=self.url) comic.start_date = self._get_date(self.start_date) comic.end_date = self._get_date(self.end_date) comic.rights = self.rights comic.save() def _get_date(self, date): if date is None: return None return dt.datetime.strptime(date, '%Y-%m-%d').date() ## Instruction: Add new boolean field MetaBase.active ## Code After: import datetime as dt from comics.core.models import Comic class MetaBase(object): # Required values name = None language = None url = None # Default values active = True start_date = None end_date = None rights = '' @property def slug(self): return self.__module__.split('.')[-1] def create_comic(self): if Comic.objects.filter(slug=self.slug).count(): comic = Comic.objects.get(slug=self.slug) comic.name = self.name comic.language = self.language comic.url = self.url else: comic = Comic( name=self.name, slug=self.slug, language=self.language, url=self.url) comic.active = self.active comic.start_date = self._get_date(self.start_date) comic.end_date = self._get_date(self.end_date) comic.rights = self.rights comic.save() def _get_date(self, date): if date is None: return None return dt.datetime.strptime(date, '%Y-%m-%d').date()
# ... existing code ... url = None # Default values active = True start_date = None end_date = None rights = '' # ... modified code ... slug=self.slug, language=self.language, url=self.url) comic.active = self.active comic.start_date = self._get_date(self.start_date) comic.end_date = self._get_date(self.end_date) comic.rights = self.rights # ... rest of the code ...
16ec04814733c1344a3a7dff13cb77539d90c866
projects/OG-LiveData/src/main/java/com/opengamma/livedata/normalization/UnitChange.java
projects/OG-LiveData/src/main/java/com/opengamma/livedata/normalization/UnitChange.java
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.livedata.normalization;

import org.fudgemsg.MutableFudgeMsg;

import com.opengamma.livedata.server.FieldHistoryStore;
import com.opengamma.util.ArgumentChecker;

/**
 * Multiplies the value of a {@code Double} field by a constant.
 */
public class UnitChange implements NormalizationRule {

  private final String _field;
  private final double _multiplier;

  public UnitChange(String field, double multiplier) {
    ArgumentChecker.notNull(field, "Field name");
    _field = field;
    _multiplier = multiplier;
  }

  @Override
  public MutableFudgeMsg apply(MutableFudgeMsg msg, String securityUniqueId, FieldHistoryStore fieldHistory) {
    return multiplyField(msg, _field, _multiplier);
  }

  /*package*/ static MutableFudgeMsg multiplyField(MutableFudgeMsg msg, String field, double multiplier) {
    Double value = msg.getDouble(field);
    if (value != null) {
      double newValue = value * multiplier;
      msg.remove(field);
      msg.add(field, newValue);
    }
    return msg;
  }

}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.livedata.normalization;

import java.util.Set;

import org.fudgemsg.MutableFudgeMsg;

import com.google.common.collect.ImmutableSet;
import com.opengamma.livedata.server.FieldHistoryStore;
import com.opengamma.util.ArgumentChecker;

/**
 * Multiplies the value of a {@code Double} field by a constant.
 */
public class UnitChange implements NormalizationRule {

  private final Set<String> _fields;
  private final double _multiplier;

  public UnitChange(String field, double multiplier) {
    ArgumentChecker.notNull(field, "Field name");
    _fields = ImmutableSet.of(field);
    _multiplier = multiplier;
  }

  public UnitChange(Set<String> fields, double multiplier) {
    ArgumentChecker.notNull(fields, "Field names");
    _fields = fields;
    _multiplier = multiplier;
  }

  public UnitChange(double multiplier, String... fields) {
    ArgumentChecker.notNull(fields, "fields");
    _fields = ImmutableSet.copyOf(fields);
    _multiplier = multiplier;
  }

  @Override
  public MutableFudgeMsg apply(MutableFudgeMsg msg, String securityUniqueId, FieldHistoryStore fieldHistory) {
    return multiplyFields(msg, _fields, _multiplier);
  }

  private static MutableFudgeMsg multiplyFields(MutableFudgeMsg msg, Set<String> fields, double multiplier) {
    for (String field : fields) {
      Double value = msg.getDouble(field);
      if (value != null) {
        double newValue = value * multiplier;
        msg.remove(field);
        msg.add(field, newValue);
      }
    }
    return msg;
  }

}
Support adjustment of multiple fields
[PLAT-4419] Support adjustment of multiple fields
Java
apache-2.0
codeaudit/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform
java
## Code Before: /** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.livedata.normalization; import org.fudgemsg.MutableFudgeMsg; import com.opengamma.livedata.server.FieldHistoryStore; import com.opengamma.util.ArgumentChecker; /** * Multiplies the value of a {@code Double} field by a constant. */ public class UnitChange implements NormalizationRule { private final String _field; private final double _multiplier; public UnitChange(String field, double multiplier) { ArgumentChecker.notNull(field, "Field name"); _field = field; _multiplier = multiplier; } @Override public MutableFudgeMsg apply(MutableFudgeMsg msg, String securityUniqueId, FieldHistoryStore fieldHistory) { return multiplyField(msg, _field, _multiplier); } /*package*/ static MutableFudgeMsg multiplyField(MutableFudgeMsg msg, String field, double multiplier) { Double value = msg.getDouble(field); if (value != null) { double newValue = value * multiplier; msg.remove(field); msg.add(field, newValue); } return msg; } } ## Instruction: [PLAT-4419] Support adjustment of multiple fields ## Code After: /** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.livedata.normalization; import java.util.Set; import org.fudgemsg.MutableFudgeMsg; import com.google.common.collect.ImmutableSet; import com.opengamma.livedata.server.FieldHistoryStore; import com.opengamma.util.ArgumentChecker; /** * Multiplies the value of a {@code Double} field by a constant. */ public class UnitChange implements NormalizationRule { private final Set<String> _fields; private final double _multiplier; public UnitChange(String field, double multiplier) { ArgumentChecker.notNull(field, "Field name"); _fields = ImmutableSet.of(field); _multiplier = multiplier; } public UnitChange(Set<String> fields, double multiplier) { ArgumentChecker.notNull(fields, "Field names"); _fields = fields; _multiplier = multiplier; } public UnitChange(double multiplier, String... fields) { ArgumentChecker.notNull(fields, "fields"); _fields = ImmutableSet.copyOf(fields); _multiplier = multiplier; } @Override public MutableFudgeMsg apply(MutableFudgeMsg msg, String securityUniqueId, FieldHistoryStore fieldHistory) { return multiplyFields(msg, _fields, _multiplier); } private static MutableFudgeMsg multiplyFields(MutableFudgeMsg msg, Set<String> fields, double multiplier) { for (String field : fields) { Double value = msg.getDouble(field); if (value != null) { double newValue = value * multiplier; msg.remove(field); msg.add(field, newValue); } } return msg; } }
... */ package com.opengamma.livedata.normalization; import java.util.Set; import org.fudgemsg.MutableFudgeMsg; import com.google.common.collect.ImmutableSet; import com.opengamma.livedata.server.FieldHistoryStore; import com.opengamma.util.ArgumentChecker; ... */ public class UnitChange implements NormalizationRule { private final Set<String> _fields; private final double _multiplier; public UnitChange(String field, double multiplier) { ArgumentChecker.notNull(field, "Field name"); _fields = ImmutableSet.of(field); _multiplier = multiplier; } public UnitChange(Set<String> fields, double multiplier) { ArgumentChecker.notNull(fields, "Field names"); _fields = fields; _multiplier = multiplier; } public UnitChange(double multiplier, String... fields) { ArgumentChecker.notNull(fields, "fields"); _fields = ImmutableSet.copyOf(fields); _multiplier = multiplier; } @Override public MutableFudgeMsg apply(MutableFudgeMsg msg, String securityUniqueId, FieldHistoryStore fieldHistory) { return multiplyFields(msg, _fields, _multiplier); } private static MutableFudgeMsg multiplyFields(MutableFudgeMsg msg, Set<String> fields, double multiplier) { for (String field : fields) { Double value = msg.getDouble(field); if (value != null) { double newValue = value * multiplier; msg.remove(field); msg.add(field, newValue); } } return msg; } ...
9e7aed847c2d5fcd6e00bc787d8b3558b590f605
api/logs/urls.py
api/logs/urls.py
from django.conf.urls import url

from api.logs import views

urlpatterns = [
    url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
    url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
from django.conf.urls import url

from api.logs import views

urlpatterns = [
    url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
    url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
    url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
Add /v2/logs/log_id/added_contributors/ to list of URL's.
Add /v2/logs/log_id/added_contributors/ to list of URL's.
Python
apache-2.0
abought/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,chennan47/osf.io,RomanZWang/osf.io,alexschiller/osf.io,billyhunt/osf.io,jnayak1/osf.io,RomanZWang/osf.io,emetsger/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,emetsger/osf.io,emetsger/osf.io,billyhunt/osf.io,KAsante95/osf.io,zachjanicki/osf.io,mfraezz/osf.io,TomBaxter/osf.io,saradbowman/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,chennan47/osf.io,samchrisinger/osf.io,zamattiac/osf.io,caseyrollins/osf.io,kwierman/osf.io,samchrisinger/osf.io,hmoco/osf.io,mfraezz/osf.io,cslzchen/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,aaxelb/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,KAsante95/osf.io,acshi/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,binoculars/osf.io,GageGaskins/osf.io,hmoco/osf.io,GageGaskins/osf.io,kwierman/osf.io,hmoco/osf.io,caneruguz/osf.io,SSJohns/osf.io,billyhunt/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,mluo613/osf.io,rdhyee/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,chennan47/osf.io,icereval/osf.io,rdhyee/osf.io,doublebits/osf.io,adlius/osf.io,caneruguz/osf.io,amyshi188/osf.io,jnayak1/osf.io,mluke93/osf.io,erinspace/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,laurenrevere/osf.io,acshi/osf.io,crcresearch/osf.io,cwisecarver/osf.io,binoculars/osf.io,brianjgeiger/osf.io,sloria/osf.io,zachjanicki/osf.io,baylee-d/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,icereval/osf.io,wearpants/osf.io,aaxelb/osf.io,caseyrollins/osf.io,erinspace/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,mluke93/osf.io,leb2dg/osf.io,Nesiehr/osf.io,amyshi188/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,sloria/osf.io,kwierman/osf.io,samchrisinger/osf.io,doublebits/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,mluke93/osf.io,mfraezz/osf.io,saradbowman/osf.io,kch8qx/osf.io,KAsante95/osf.io,cwisecarver/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,chrisseto/osf.io,acshi/osf.io,amyshi188/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,mattclark/osf.io,cslzchen/osf.io,Nesiehr/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,acshi/osf.io,cwisecarver/osf.io,kwierman/osf.io,abought/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,felliott/osf.io,adlius/osf.io,felliott/osf.io,jnayak1/osf.io,binoculars/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,billyhunt/osf.io,abought/osf.io,mluo613/osf.io,zamattiac/osf.io,GageGaskins/osf.io,mluo613/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,hmoco/osf.io,wearpants/osf.io,TomBaxter/osf.io,aaxelb/osf.io,alexschiller/osf.io,caseyrollins/osf.io,mfraezz/osf.io,doublebits/osf.io,zamattiac/osf.io,sloria/osf.io,pattisdr/osf.io,pattisdr/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,felliott/osf.io,monikagrabowska/osf.io,wearpants/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,adlius/osf.io,emetsger/osf.io,RomanZWang/osf.io,chrisseto/osf.io,kch8qx/osf.io,billyhunt/osf.io,chennan47/osf.io,kch8qx/osf.io,icereval/osf.io,TomHeatwole/osf.io,mluo613/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,kch8qx/osf.io
python
## Code Before: from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), ] ## Instruction: Add /v2/logs/log_id/added_contributors/ to list of URL's. ## Code After: from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name), ]
// ... existing code ... urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name), ] // ... rest of the code ...
db4ccce9e418a1227532bde8834ca682bc873609
system/t04_mirror/show.py
system/t04_mirror/show.py
from lib import BaseTest


class ShowMirror1Test(BaseTest):
    """
    show mirror: regular mirror
    """
    fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"]
    runCmd = "aptly mirror show mirror1"


class ShowMirror2Test(BaseTest):
    """
    show mirror: missing mirror
    """
    runCmd = "aptly mirror show mirror-xx"
    expectedCode = 1


class ShowMirror3Test(BaseTest):
    """
    show mirror: regular mirror with packages
    """
    fixtureDB = True
    runCmd = "aptly mirror show --with-packages wheezy-contrib"
from lib import BaseTest
import re


class ShowMirror1Test(BaseTest):
    """
    show mirror: regular mirror
    """
    fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"]
    runCmd = "aptly mirror show mirror1"


class ShowMirror2Test(BaseTest):
    """
    show mirror: missing mirror
    """
    runCmd = "aptly mirror show mirror-xx"
    expectedCode = 1


class ShowMirror3Test(BaseTest):
    """
    show mirror: regular mirror with packages
    """
    fixtureDB = True
    runCmd = "aptly mirror show --with-packages wheezy-contrib"
    outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
Remove updated at while comparing.
Remove updated at while comparing.
Python
mit
gearmover/aptly,bsundsrud/aptly,adfinis-forks/aptly,vincentbernat/aptly,gdbdzgd/aptly,ceocoder/aptly,adfinis-forks/aptly,seaninspace/aptly,neolynx/aptly,scalp42/aptly,gdbdzgd/aptly,sobczyk/aptly,neolynx/aptly,scalp42/aptly,aptly-dev/aptly,seaninspace/aptly,aptly-dev/aptly,bsundsrud/aptly,gdbdzgd/aptly,bankonme/aptly,adfinis-forks/aptly,sobczyk/aptly,seaninspace/aptly,vincentbernat/aptly,smira/aptly,jola5/aptly,scalp42/aptly,smira/aptly,ceocoder/aptly,gearmover/aptly,bankonme/aptly,bsundsrud/aptly,vincentbernat/aptly,ceocoder/aptly,jola5/aptly,jola5/aptly,aptly-dev/aptly,gearmover/aptly,sobczyk/aptly,neolynx/aptly,smira/aptly,bankonme/aptly
python
## Code Before: from lib import BaseTest class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" ## Instruction: Remove updated at while comparing. ## Code After: from lib import BaseTest import re class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
... from lib import BaseTest import re class ShowMirror1Test(BaseTest): ... """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s) ...
0fb16c44b13ca467fb8ede67bdc93450712cb2bb
test/tiles/hitile_test.py
test/tiles/hitile_test.py
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile


def test_hitile():
    array_size = int(1e6)
    chunk_size = 2**19

    data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))

    with tempfile.TemporaryDirectory() as td:
        output_file = op.join(td, 'blah.hitile')

        hghi.array_to_hitile(data, output_file, zoom_step=6)

        with h5py.File(output_file, 'r') as f:
            (means, mins, maxs) = hghi.get_data(f, 0, 0)

            # print("means, mins:", means[:10], mins[:10], maxs[:10])
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile


def test_hitile():
    array_size = int(1e6)
    chunk_size = 2**19

    data = np.random.random((array_size,))

    with tempfile.TemporaryDirectory() as td:
        output_file = op.join(td, 'blah.hitile')

        hghi.array_to_hitile(
            data,
            output_file,
            zoom_step=6,
            chunks=(chunk_size,)
        )

        with h5py.File(output_file, 'r') as f:
            (means, mins, maxs) = hghi.get_data(f, 0, 0)

            # print("means, mins:", means[:10], mins[:10], maxs[:10])
Fix error of applying dask twice
Fix error of applying dask twice
Python
mit
hms-dbmi/clodius,hms-dbmi/clodius
python
## Code Before: import dask.array as da import h5py import clodius.tiles.hitile as hghi import numpy as np import os.path as op import tempfile def test_hitile(): array_size = int(1e6) chunk_size = 2**19 data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,)) with tempfile.TemporaryDirectory() as td: output_file = op.join(td, 'blah.hitile') hghi.array_to_hitile(data, output_file, zoom_step=6) with h5py.File(output_file, 'r') as f: (means, mins, maxs) = hghi.get_data(f, 0, 0) # print("means, mins:", means[:10], mins[:10], maxs[:10]) ## Instruction: Fix error of applying dask twice ## Code After: import dask.array as da import h5py import clodius.tiles.hitile as hghi import numpy as np import os.path as op import tempfile def test_hitile(): array_size = int(1e6) chunk_size = 2**19 data = np.random.random((array_size,)) with tempfile.TemporaryDirectory() as td: output_file = op.join(td, 'blah.hitile') hghi.array_to_hitile( data, output_file, zoom_step=6, chunks=(chunk_size,) ) with h5py.File(output_file, 'r') as f: (means, mins, maxs) = hghi.get_data(f, 0, 0) # print("means, mins:", means[:10], mins[:10], maxs[:10])
// ... existing code ... array_size = int(1e6) chunk_size = 2**19 data = np.random.random((array_size,)) with tempfile.TemporaryDirectory() as td: output_file = op.join(td, 'blah.hitile') hghi.array_to_hitile( data, output_file, zoom_step=6, chunks=(chunk_size,) ) with h5py.File(output_file, 'r') as f: (means, mins, maxs) = hghi.get_data(f, 0, 0) // ... rest of the code ...
cc8f1507c90261947d9520859922bff44ef9c6b4
observatory/lib/InheritanceQuerySet.py
observatory/lib/InheritanceQuerySet.py
from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor


class InheritanceQuerySet(QuerySet):
    def select_subclasses(self, *subclasses):
        if not subclasses:
            subclasses = [o for o in dir(self.model)
                          if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
                          and issubclass(getattr(self.model,o).related.model, self.model)]
        new_qs = self.select_related(*subclasses)
        new_qs.subclasses = subclasses
        return new_qs

    def _clone(self, klass=None, setup=False, **kwargs):
        try:
            kwargs.update({'subclasses': self.subclasses})
        except AttributeError:
            pass
        return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)

    def iterator(self):
        iter = super(InheritanceQuerySet, self).iterator()
        if getattr(self, 'subclasses', False):
            for obj in iter:
                obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
                yield obj[0]
        else:
            for obj in iter:
                yield obj
from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist


class InheritanceQuerySet(QuerySet):
    def select_subclasses(self, *subclasses):
        if not subclasses:
            subclasses = [o for o in dir(self.model)
                          if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
                          and issubclass(getattr(self.model,o).related.model, self.model)]
        new_qs = self.select_related(*subclasses)
        new_qs.subclasses = subclasses
        return new_qs

    def _clone(self, klass=None, setup=False, **kwargs):
        try:
            kwargs.update({'subclasses': self.subclasses})
        except AttributeError:
            pass
        return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)

    def _get_subclasses(self, obj):
        result = []
        for s in getattr(self, 'subclassses', []):
            try:
                if getattr(obj, s):
                    result += getattr(obj, s)
            except ObjectDoesNotExist:
                continue
        return result or [obj]

    def iterator(self):
        iter = super(InheritanceQuerySet, self).iterator()
        if getattr(self, 'subclasses', False):
            for obj in iter:
                yield self._get_subclasses(obj)[0]
        else:
            for obj in iter:
                yield obj
Fix the feed to work with new versions of django
Fix the feed to work with new versions of django
Python
isc
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
python
## Code Before: from django.db.models.query import QuerySet from django.db.models.fields.related import SingleRelatedObjectDescriptor class InheritanceQuerySet(QuerySet): def select_subclasses(self, *subclasses): if not subclasses: subclasses = [o for o in dir(self.model) if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\ and issubclass(getattr(self.model,o).related.model, self.model)] new_qs = self.select_related(*subclasses) new_qs.subclasses = subclasses return new_qs def _clone(self, klass=None, setup=False, **kwargs): try: kwargs.update({'subclasses': self.subclasses}) except AttributeError: pass return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs) def iterator(self): iter = super(InheritanceQuerySet, self).iterator() if getattr(self, 'subclasses', False): for obj in iter: obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj] yield obj[0] else: for obj in iter: yield obj ## Instruction: Fix the feed to work with new versions of django ## Code After: from django.db.models.query import QuerySet from django.db.models.fields.related import SingleRelatedObjectDescriptor from django.core.exceptions import ObjectDoesNotExist class InheritanceQuerySet(QuerySet): def select_subclasses(self, *subclasses): if not subclasses: subclasses = [o for o in dir(self.model) if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\ and issubclass(getattr(self.model,o).related.model, self.model)] new_qs = self.select_related(*subclasses) new_qs.subclasses = subclasses return new_qs def _clone(self, klass=None, setup=False, **kwargs): try: kwargs.update({'subclasses': self.subclasses}) except AttributeError: pass return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs) def _get_subclasses(self, obj): result = [] for s in getattr(self, 'subclassses', []): try: if getattr(obj, s): result += getattr(obj, s) except ObjectDoesNotExist: continue return result or [obj] def iterator(self): iter = super(InheritanceQuerySet, self).iterator() if getattr(self, 'subclasses', False): for obj in iter: yield self._get_subclasses(obj)[0] else: for obj in iter: yield obj
# ... existing code ... from django.db.models.query import QuerySet from django.db.models.fields.related import SingleRelatedObjectDescriptor from django.core.exceptions import ObjectDoesNotExist class InheritanceQuerySet(QuerySet): def select_subclasses(self, *subclasses): # ... modified code ... pass return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs) def _get_subclasses(self, obj): result = [] for s in getattr(self, 'subclassses', []): try: if getattr(obj, s): result += getattr(obj, s) except ObjectDoesNotExist: continue return result or [obj] def iterator(self): iter = super(InheritanceQuerySet, self).iterator() if getattr(self, 'subclasses', False): for obj in iter: yield self._get_subclasses(obj)[0] else: for obj in iter: yield obj # ... rest of the code ...
7e0ef4ba74bf2d6ea93f49d88c58378e7a1f9106
fabfile.py
fabfile.py
from fusionbox.fabric_helpers import *


env.roledefs = {
    'dev': ['dev.fusionbox.com'],
}
env.project_name = 'django-widgy'
env.short_name = 'widgy'
env.tld = ''


def stage_with_docs(pip=False, migrate=False, syncdb=False, branch=None):
    stage(pip=pip, migrate=migrate, syncdb=syncdb, branch=branch)
    with cd('/var/www/%s%s/doc' % (env.project_name, env.tld)):
        with virtualenv(env.short_name):
            run("make html")

stage = roles('dev')(stage)
dstage = roles('dev')(stage_with_docs)
from fusionbox.fabric_helpers import *


env.roledefs = {
    'dev': ['dev.fusionbox.com'],
}
env.project_name = 'django-widgy'
env.short_name = 'widgy'
env.tld = ''


_stage = stage
def stage(pip=False, migrate=False, syncdb=False, branch=None):
    _stage(pip=pip, migrate=migrate, syncdb=syncdb, branch=branch)
    with cd('/var/www/%s%s/doc' % (env.project_name, env.tld)):
        with virtualenv(env.short_name):
            run("make html")

stage = roles('dev')(stage)
Make this one role, stage
Make this one role, stage
Python
apache-2.0
j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy
python
## Code Before: from fusionbox.fabric_helpers import * env.roledefs = { 'dev': ['dev.fusionbox.com'], } env.project_name = 'django-widgy' env.short_name = 'widgy' env.tld = '' def stage_with_docs(pip=False, migrate=False, syncdb=False, branch=None): stage(pip=pip, migrate=migrate, syncdb=syncdb, branch=branch) with cd('/var/www/%s%s/doc' % (env.project_name, env.tld)): with virtualenv(env.short_name): run("make html") stage = roles('dev')(stage) dstage = roles('dev')(stage_with_docs) ## Instruction: Make this one role, stage ## Code After: from fusionbox.fabric_helpers import * env.roledefs = { 'dev': ['dev.fusionbox.com'], } env.project_name = 'django-widgy' env.short_name = 'widgy' env.tld = '' _stage = stage def stage(pip=False, migrate=False, syncdb=False, branch=None): _stage(pip=pip, migrate=migrate, syncdb=syncdb, branch=branch) with cd('/var/www/%s%s/doc' % (env.project_name, env.tld)): with virtualenv(env.short_name): run("make html") stage = roles('dev')(stage)
// ... existing code ... env.tld = '' _stage = stage def stage(pip=False, migrate=False, syncdb=False, branch=None): _stage(pip=pip, migrate=migrate, syncdb=syncdb, branch=branch) with cd('/var/www/%s%s/doc' % (env.project_name, env.tld)): with virtualenv(env.short_name): run("make html") stage = roles('dev')(stage) // ... rest of the code ...
97d2308b5a7b5d17fd9ebe2d01692bc9d61c34f1
app/src/test/java/amu/zhcetstudent/ExampleUnitTest.java
app/src/test/java/amu/zhcetstudent/ExampleUnitTest.java
package amu.zhcetstudent;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
package amu.zhcetstudent;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(JUnit4.class)
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
Add JUnit4 Runner to test
fix: Add JUnit4 Runner to test
Java
apache-2.0
divs4debu/zhcet-students
java
## Code Before: package amu.zhcetstudent; import org.junit.Test; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } } ## Instruction: fix: Add JUnit4 Runner to test ## Code After: package amu.zhcetstudent; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(JUnit4.class) public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
# ... existing code ... package amu.zhcetstudent; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import static org.junit.Assert.*; # ... modified code ... * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(JUnit4.class) public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { # ... rest of the code ...
7c4141ee2bfc3256ee3ae4ddd55468687abee6a2
software/nbia-dao/test/gov/nih/nci/nbia/dto/ImageDTOTestCase.java
software/nbia-dao/test/gov/nih/nci/nbia/dto/ImageDTOTestCase.java
/**
 *
 */
package gov.nih.nci.nbia.dto;

import junit.framework.TestCase;

/**
 * @author lethai
 *
 */
public class ImageDTOTestCase extends TestCase {

	public void testAccessors() {
		String SOPInstanceUID="1.2.3.4.5.6";
		String fileName="1.2.3.4.5.6.7.dcm";
		Long dicomSize = new Long(514);
		String project="RIDER";
		String site="RIDER";
		String ssg="test";

		ImageDTO imageDTO = new ImageDTO(SOPInstanceUID, fileName, dicomSize, project, site, ssg);

		assertTrue(imageDTO.getSOPInstanceUID().equals("1.2.3.4.5.6"));
		assertTrue(imageDTO.getFileName().equals("1.2.3.4.5.6.7.dcm"));
		assertTrue(imageDTO.getDicomSize() ==514L);
		assertTrue(imageDTO.getProject().equals("RIDER"));
		assertTrue(imageDTO.getSite().equals("RIDER"));
		assertTrue(imageDTO.getSsg().equals("test"));
	}
}
/**
 *
 */
package gov.nih.nci.nbia.dto;

import junit.framework.TestCase;

/**
 * @author lethai
 *
 */
public class ImageDTOTestCase extends TestCase {

	public void testAccessors() {
		String SOPInstanceUID="1.2.3.4.5.6";
		String fileName="1.2.3.4.5.6.7.dcm";
		Long dicomSize = new Long(514);
		String project="RIDER";
		String site="RIDER";
		String ssg="test";
		int frameNum = 0;

		ImageDTO imageDTO = new ImageDTO(SOPInstanceUID, fileName, dicomSize, project, site, ssg, 0);

		assertTrue(imageDTO.getSOPInstanceUID().equals("1.2.3.4.5.6"));
		assertTrue(imageDTO.getFileName().equals("1.2.3.4.5.6.7.dcm"));
		assertTrue(imageDTO.getDicomSize() ==514L);
		assertTrue(imageDTO.getProject().equals("RIDER"));
		assertTrue(imageDTO.getSite().equals("RIDER"));
		assertTrue(imageDTO.getSsg().equals("test"));
	}
}
Make it compatible for updated ImageDTO class.
Make it compatible for updated ImageDTO class.
Java
bsd-3-clause
NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive
java
## Code Before: /** * */ package gov.nih.nci.nbia.dto; import junit.framework.TestCase; /** * @author lethai * */ public class ImageDTOTestCase extends TestCase { public void testAccessors() { String SOPInstanceUID="1.2.3.4.5.6"; String fileName="1.2.3.4.5.6.7.dcm"; Long dicomSize = new Long(514); String project="RIDER"; String site="RIDER"; String ssg="test"; ImageDTO imageDTO = new ImageDTO(SOPInstanceUID, fileName, dicomSize, project, site, ssg); assertTrue(imageDTO.getSOPInstanceUID().equals("1.2.3.4.5.6")); assertTrue(imageDTO.getFileName().equals("1.2.3.4.5.6.7.dcm")); assertTrue(imageDTO.getDicomSize() ==514L); assertTrue(imageDTO.getProject().equals("RIDER")); assertTrue(imageDTO.getSite().equals("RIDER")); assertTrue(imageDTO.getSsg().equals("test")); } } ## Instruction: Make it compatible for updated ImageDTO class. ## Code After: /** * */ package gov.nih.nci.nbia.dto; import junit.framework.TestCase; /** * @author lethai * */ public class ImageDTOTestCase extends TestCase { public void testAccessors() { String SOPInstanceUID="1.2.3.4.5.6"; String fileName="1.2.3.4.5.6.7.dcm"; Long dicomSize = new Long(514); String project="RIDER"; String site="RIDER"; String ssg="test"; int frameNum = 0; ImageDTO imageDTO = new ImageDTO(SOPInstanceUID, fileName, dicomSize, project, site, ssg, 0); assertTrue(imageDTO.getSOPInstanceUID().equals("1.2.3.4.5.6")); assertTrue(imageDTO.getFileName().equals("1.2.3.4.5.6.7.dcm")); assertTrue(imageDTO.getDicomSize() ==514L); assertTrue(imageDTO.getProject().equals("RIDER")); assertTrue(imageDTO.getSite().equals("RIDER")); assertTrue(imageDTO.getSsg().equals("test")); } }
// ... existing code ... String project="RIDER"; String site="RIDER"; String ssg="test"; int frameNum = 0; ImageDTO imageDTO = new ImageDTO(SOPInstanceUID, fileName, dicomSize, project, site, ssg, 0); assertTrue(imageDTO.getSOPInstanceUID().equals("1.2.3.4.5.6")); assertTrue(imageDTO.getFileName().equals("1.2.3.4.5.6.7.dcm")); // ... rest of the code ...
bfbc156d9efca37c35d18481c4366d3e6deed1ba
slave/skia_slave_scripts/chromeos_run_bench.py
slave/skia_slave_scripts/chromeos_run_bench.py
""" Run the Skia bench executable. """ from build_step import BuildStep, BuildStepWarning from chromeos_build_step import ChromeOSBuildStep from run_bench import RunBench import sys class ChromeOSRunBench(ChromeOSBuildStep, RunBench): def _Run(self): # TODO(borenet): Re-enable this step once the crash is fixed. # RunBench._Run(self) raise BuildStepWarning('Skipping bench on ChromeOS until crash is fixed.') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSRunBench))
""" Run the Skia bench executable. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from run_bench import RunBench import sys class ChromeOSRunBench(ChromeOSBuildStep, RunBench): pass if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSRunBench))
Stop skipping Bench on ChromeOS
Stop skipping Bench on ChromeOS (RunBuilders:Skia_ChromeOS_Alex_Debug_32) Unreviewed. git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8094 2bbb7eff-a529-9590-31e7-b0007b416f81
Python
bsd-3-clause
Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot
python
## Code Before: """ Run the Skia bench executable. """ from build_step import BuildStep, BuildStepWarning from chromeos_build_step import ChromeOSBuildStep from run_bench import RunBench import sys class ChromeOSRunBench(ChromeOSBuildStep, RunBench): def _Run(self): # TODO(borenet): Re-enable this step once the crash is fixed. # RunBench._Run(self) raise BuildStepWarning('Skipping bench on ChromeOS until crash is fixed.') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSRunBench)) ## Instruction: Stop skipping Bench on ChromeOS (RunBuilders:Skia_ChromeOS_Alex_Debug_32) Unreviewed. git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8094 2bbb7eff-a529-9590-31e7-b0007b416f81 ## Code After: """ Run the Skia bench executable. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from run_bench import RunBench import sys class ChromeOSRunBench(ChromeOSBuildStep, RunBench): pass if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSRunBench))
... """ Run the Skia bench executable. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from run_bench import RunBench import sys ... class ChromeOSRunBench(ChromeOSBuildStep, RunBench): pass if '__main__' == __name__: ...
7f5ca544d81d3cbdf1c9c0d3101f1c699ffde17c
src/java/ie/omk/smpp/event/QueueFullException.java
src/java/ie/omk/smpp/event/QueueFullException.java
package ie.omk.smpp.event;

public class QueueFullException extends RuntimeException {
    static final long serialVersionUID = 1L;
}
package ie.omk.smpp.event;

public class QueueFullException extends RuntimeException {
    static final long serialVersionUID = 1L;

    public QueueFullException() {
        super();
    }

    public QueueFullException(String message, Throwable cause) {
        super(message, cause);
    }

    public QueueFullException(String message) {
        super(message);
    }

    public QueueFullException(Throwable cause) {
        super(cause);
    }
}
Add standard constructors from parent class.
Add standard constructors from parent class.
Java
bsd-3-clause
oranoceallaigh/smppapi,oranoceallaigh/smppapi,oranoceallaigh/smppapi
java
## Code Before: package ie.omk.smpp.event; public class QueueFullException extends RuntimeException { static final long serialVersionUID = 1L; } ## Instruction: Add standard constructors from parent class. ## Code After: package ie.omk.smpp.event; public class QueueFullException extends RuntimeException { static final long serialVersionUID = 1L; public QueueFullException() { super(); } public QueueFullException(String message, Throwable cause) { super(message, cause); } public QueueFullException(String message) { super(message); } public QueueFullException(Throwable cause) { super(cause); } }
# ... existing code ... public class QueueFullException extends RuntimeException { static final long serialVersionUID = 1L; public QueueFullException() { super(); } public QueueFullException(String message, Throwable cause) { super(message, cause); } public QueueFullException(String message) { super(message); } public QueueFullException(Throwable cause) { super(cause); } } # ... rest of the code ...
e159465d4495ed2ebcbd1515d82f4f85fc28c8f7
corral/views/private.py
corral/views/private.py
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Method not allowed"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
Use a better example error message
Use a better example error message
Python
mit
nickfrostatx/corral,nickfrostatx/corral,nickfrostatx/corral
python
## Code Before: """These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Method not allowed"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response ## Instruction: Use a better example error message ## Code After: """These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
# ... existing code ... @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code # ... rest of the code ...
98bf53dd350869e31c89f14cb0ebfa6a467dd0ec
events/migrations/0017_auto_20160208_1729.py
events/migrations/0017_auto_20160208_1729.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('events', '0016_auto_20160205_1754'), ] operations = [ migrations.AlterField( model_name='image', name='url', field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('events', '0016_auto_20160205_1754'), ] operations = [ migrations.AlterField( model_name='event', name='external_image_url', field=models.URLField(blank=True, max_length=400, null=True), ), migrations.AlterField( model_name='image', name='url', field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), ), ]
Revert "Remove redundant migration operation."
Revert "Remove redundant migration operation." This reverts commit 9d34264d275acd32122de3567e60b24a417d6098.
Python
mit
City-of-Helsinki/linkedevents,aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents,tuomas777/linkedevents,aapris/linkedevents
python
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('events', '0016_auto_20160205_1754'), ] operations = [ migrations.AlterField( model_name='image', name='url', field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), ), ] ## Instruction: Revert "Remove redundant migration operation." This reverts commit 9d34264d275acd32122de3567e60b24a417d6098. ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('events', '0016_auto_20160205_1754'), ] operations = [ migrations.AlterField( model_name='event', name='external_image_url', field=models.URLField(blank=True, max_length=400, null=True), ), migrations.AlterField( model_name='image', name='url', field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), ), ]
... operations = [ migrations.AlterField( model_name='event', name='external_image_url', field=models.URLField(blank=True, max_length=400, null=True), ), migrations.AlterField( model_name='image', name='url', field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), ...
ffe9bba2e4045236a3f3731e39876b6220f8f9a1
jarviscli/plugins/joke_of_day.py
jarviscli/plugins/joke_of_day.py
from plugin import plugin, require import requests from colorama import Fore from plugins.animations import SpinnerThread @require(network=True) @plugin('joke daily') class joke_of_day: """ Provides you with a joke of day to help you laugh amidst the daily boring schedule Enter 'joke daily' to use """ def __call__(self, jarvis, s): jarvis.say("Welcome To The Plugin Joke Of Day!", Fore.CYAN) jarvis.say("Jokes provided by jokes.one API", Fore.CYAN, False) print() joke_fetch = self.get_joke(jarvis) if joke_fetch is not None: self.joke(jarvis, joke_fetch) def get_joke(self, jarvis): spinner = SpinnerThread('Fetching ', 0.15) while True: url = "https://api.jokes.one/jod" spinner.start() r = requests.get(url) if r is None: spinner.stop() jarvis.say( "Error in fetching joke - try again! later", Fore.RED) spinner.stop() return r.json() def joke(self, jarvis, joke_fetch): title = joke_fetch["contents"]["jokes"][0]["joke"]["title"] joke = joke_fetch["contents"]["jokes"][0]["joke"]["text"] print() jarvis.say("Title: " + title, Fore.BLUE) print() jarvis.say(joke, Fore.YELLOW)
from plugin import plugin, require import requests from colorama import Fore @require(network=True) @plugin('joke daily') class joke_of_day: """ Provides you with a joke of day to help you laugh amidst the daily boring schedule Enter 'joke daily' to use """ def __call__(self, jarvis, s): jarvis.say("Welcome To The Plugin Joke Of Day!", Fore.CYAN) jarvis.say("Jokes provided by jokes.one API", Fore.CYAN, False) print() joke_fetch = self.get_joke(jarvis) if joke_fetch is not None: self.joke(jarvis, joke_fetch) def get_joke(self, jarvis): while True: url = "https://api.jokes.one/jod" jarvis.spinner_start('Fetching') r = requests.get(url) if r is None: spinner.stop() jarvis.say( "Error in fetching joke - try again! later", Fore.RED) jarvis.spinner_stop() return r.json() def joke(self, jarvis, joke_fetch): title = joke_fetch["contents"]["jokes"][0]["joke"]["title"] joke = joke_fetch["contents"]["jokes"][0]["joke"]["text"] print() jarvis.say("Title: " + title, Fore.BLUE) print() jarvis.say(joke, Fore.YELLOW)
Update joke of day: Fix for moved SpinnerThread
Update joke of day: Fix for moved SpinnerThread
Python
mit
sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis
python
## Code Before: from plugin import plugin, require import requests from colorama import Fore from plugins.animations import SpinnerThread @require(network=True) @plugin('joke daily') class joke_of_day: """ Provides you with a joke of day to help you laugh amidst the daily boring schedule Enter 'joke daily' to use """ def __call__(self, jarvis, s): jarvis.say("Welcome To The Plugin Joke Of Day!", Fore.CYAN) jarvis.say("Jokes provided by jokes.one API", Fore.CYAN, False) print() joke_fetch = self.get_joke(jarvis) if joke_fetch is not None: self.joke(jarvis, joke_fetch) def get_joke(self, jarvis): spinner = SpinnerThread('Fetching ', 0.15) while True: url = "https://api.jokes.one/jod" spinner.start() r = requests.get(url) if r is None: spinner.stop() jarvis.say( "Error in fetching joke - try again! later", Fore.RED) spinner.stop() return r.json() def joke(self, jarvis, joke_fetch): title = joke_fetch["contents"]["jokes"][0]["joke"]["title"] joke = joke_fetch["contents"]["jokes"][0]["joke"]["text"] print() jarvis.say("Title: " + title, Fore.BLUE) print() jarvis.say(joke, Fore.YELLOW) ## Instruction: Update joke of day: Fix for moved SpinnerThread ## Code After: from plugin import plugin, require import requests from colorama import Fore @require(network=True) @plugin('joke daily') class joke_of_day: """ Provides you with a joke of day to help you laugh amidst the daily boring schedule Enter 'joke daily' to use """ def __call__(self, jarvis, s): jarvis.say("Welcome To The Plugin Joke Of Day!", Fore.CYAN) jarvis.say("Jokes provided by jokes.one API", Fore.CYAN, False) print() joke_fetch = self.get_joke(jarvis) if joke_fetch is not None: self.joke(jarvis, joke_fetch) def get_joke(self, jarvis): while True: url = "https://api.jokes.one/jod" jarvis.spinner_start('Fetching') r = requests.get(url) if r is None: spinner.stop() jarvis.say( "Error in fetching joke - try again! later", Fore.RED) jarvis.spinner_stop() return r.json() def joke(self, jarvis, joke_fetch): title = joke_fetch["contents"]["jokes"][0]["joke"]["title"] joke = joke_fetch["contents"]["jokes"][0]["joke"]["text"] print() jarvis.say("Title: " + title, Fore.BLUE) print() jarvis.say(joke, Fore.YELLOW)
// ... existing code ... from plugin import plugin, require import requests from colorama import Fore @require(network=True) // ... modified code ... self.joke(jarvis, joke_fetch) def get_joke(self, jarvis): while True: url = "https://api.jokes.one/jod" jarvis.spinner_start('Fetching') r = requests.get(url) if r is None: spinner.stop() jarvis.say( "Error in fetching joke - try again! later", Fore.RED) jarvis.spinner_stop() return r.json() def joke(self, jarvis, joke_fetch): // ... rest of the code ...
564d54c377bf6a8c16cae3681934cc7ba5007c76
bundledApps/wailEndpoint.py
bundledApps/wailEndpoint.py
import tornado.ioloop import tornado.web import requests host = 'localhost' waybackPort = '8080' archiveConfigFile = '/Applications/WAIL.app/config/archive.json' class MainHandler(tornado.web.RequestHandler): def get(self): iwa = isWaybackAccessible() print iwa self.write(iwa) def make_app(): return tornado.web.Application([ (r"/", MainHandler), ]) def isWaybackAccessible(): try: r = requests.get('http://' + host + ':' + waybackPort) with open(archiveConfigFile, 'r') as myfile: data=myfile.read() return data except requests.exceptions.ConnectionError as e: return '' if __name__ == "__main__": app = make_app() app.listen(8888) tornado.ioloop.IOLoop.current().start()
import tornado.ioloop import tornado.web import requests host = 'localhost' waybackPort = '8080' # Use a separate JSON file that only queries the local WAIL instance for MemGator archiveConfigFile = '/Applications/WAIL.app/config/archive.json' class MainHandler(tornado.web.RequestHandler): def get(self): iwa = isWaybackAccessible() print iwa self.write(iwa) def make_app(): return tornado.web.Application([ (r"/", MainHandler), ]) def isWaybackAccessible(): try: r = requests.get('http://' + host + ':' + waybackPort) with open(archiveConfigFile, 'r') as myfile: data=myfile.read() return data except requests.exceptions.ConnectionError as e: return '' if __name__ == "__main__": app = make_app() app.listen(8888) tornado.ioloop.IOLoop.current().start()
Add comment to justify separate JSON file existence
Add comment to justify separate JSON file existence
Python
mit
machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail
python
## Code Before: import tornado.ioloop import tornado.web import requests host = 'localhost' waybackPort = '8080' archiveConfigFile = '/Applications/WAIL.app/config/archive.json' class MainHandler(tornado.web.RequestHandler): def get(self): iwa = isWaybackAccessible() print iwa self.write(iwa) def make_app(): return tornado.web.Application([ (r"/", MainHandler), ]) def isWaybackAccessible(): try: r = requests.get('http://' + host + ':' + waybackPort) with open(archiveConfigFile, 'r') as myfile: data=myfile.read() return data except requests.exceptions.ConnectionError as e: return '' if __name__ == "__main__": app = make_app() app.listen(8888) tornado.ioloop.IOLoop.current().start() ## Instruction: Add comment to justify separate JSON file existence ## Code After: import tornado.ioloop import tornado.web import requests host = 'localhost' waybackPort = '8080' # Use a separate JSON file that only queries the local WAIL instance for MemGator archiveConfigFile = '/Applications/WAIL.app/config/archive.json' class MainHandler(tornado.web.RequestHandler): def get(self): iwa = isWaybackAccessible() print iwa self.write(iwa) def make_app(): return tornado.web.Application([ (r"/", MainHandler), ]) def isWaybackAccessible(): try: r = requests.get('http://' + host + ':' + waybackPort) with open(archiveConfigFile, 'r') as myfile: data=myfile.read() return data except requests.exceptions.ConnectionError as e: return '' if __name__ == "__main__": app = make_app() app.listen(8888) tornado.ioloop.IOLoop.current().start()
# ... existing code ... host = 'localhost' waybackPort = '8080' # Use a separate JSON file that only queries the local WAIL instance for MemGator archiveConfigFile = '/Applications/WAIL.app/config/archive.json' class MainHandler(tornado.web.RequestHandler): # ... rest of the code ...
33b07760827633cdf76ec1b434c9c5f3bdf345f9
setup.py
setup.py
from setuptools import setup setup( name = "JIRA lean forward", version = "0.1.1dev", description = "Get Lean Stats like throughput and cycle time out of jira with ease", author = "Chris Young", licence = "BSD", author_email = "[email protected]", platforms = ["Any"], packages = ['jira_stats'], include_package_data = True, install_requires=[ 'python-dateutil==1.5', 'numpy', 'pandas', 'requests', 'jira-python', 'mockito', 'xlwt', 'argparse' ] )
from setuptools import setup setup( name = "JIRA lean forward", version = "0.1.1dev", description = "Get Lean Stats like throughput and cycle time out of jira with ease", author = "Chris Young", licence = "BSD", author_email = "[email protected]", platforms = ["Any"], packages = ['jira_stats'], include_package_data = True, install_requires=[ 'argparse==1.2.1', 'ipython==0.13.2', 'jira-python==0.13', 'mockito==0.5.1', 'numpy==1.7.1', 'oauthlib==0.4.0', 'pandas==0.11.0', 'python-dateutil==1.5', 'pytz==2013b', 'requests==1.2.0', 'requests-oauthlib==0.3.1', 'six==1.3.0', 'tlslite==0.4.1', 'wsgiref==0.1.2', 'xlwt==0.7.5' ] )
Set requirements to match output of pip freeze to see if that fixed Travis 2.7 build
JLF-6: Set requirements to match output of pip freeze to see if that fixed Travis 2.7 build
Python
bsd-2-clause
worldofchris/jlf
python
## Code Before: from setuptools import setup setup( name = "JIRA lean forward", version = "0.1.1dev", description = "Get Lean Stats like throughput and cycle time out of jira with ease", author = "Chris Young", licence = "BSD", author_email = "[email protected]", platforms = ["Any"], packages = ['jira_stats'], include_package_data = True, install_requires=[ 'python-dateutil==1.5', 'numpy', 'pandas', 'requests', 'jira-python', 'mockito', 'xlwt', 'argparse' ] ) ## Instruction: JLF-6: Set requirements to match output of pip freeze to see if that fixed Travis 2.7 build ## Code After: from setuptools import setup setup( name = "JIRA lean forward", version = "0.1.1dev", description = "Get Lean Stats like throughput and cycle time out of jira with ease", author = "Chris Young", licence = "BSD", author_email = "[email protected]", platforms = ["Any"], packages = ['jira_stats'], include_package_data = True, install_requires=[ 'argparse==1.2.1', 'ipython==0.13.2', 'jira-python==0.13', 'mockito==0.5.1', 'numpy==1.7.1', 'oauthlib==0.4.0', 'pandas==0.11.0', 'python-dateutil==1.5', 'pytz==2013b', 'requests==1.2.0', 'requests-oauthlib==0.3.1', 'six==1.3.0', 'tlslite==0.4.1', 'wsgiref==0.1.2', 'xlwt==0.7.5' ] )
// ... existing code ... packages = ['jira_stats'], include_package_data = True, install_requires=[ 'argparse==1.2.1', 'ipython==0.13.2', 'jira-python==0.13', 'mockito==0.5.1', 'numpy==1.7.1', 'oauthlib==0.4.0', 'pandas==0.11.0', 'python-dateutil==1.5', 'pytz==2013b', 'requests==1.2.0', 'requests-oauthlib==0.3.1', 'six==1.3.0', 'tlslite==0.4.1', 'wsgiref==0.1.2', 'xlwt==0.7.5' ] ) // ... rest of the code ...
e49ac8daeabf82708f2ba7bb623d7db73e1fcaff
readthedocs/core/subdomain_urls.py
readthedocs/core/subdomain_urls.py
from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns
from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^(?P<version_slug>.*)/$', 'core.views.subdomain_handler', name='version_subdomain_handler' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns
Add verison_slug redirection back in for now.
Add verison_slug redirection back in for now.
Python
mit
agjohnson/readthedocs.org,kdkeyser/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,espdev/readthedocs.org,ojii/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,GovReady/readthedocs.org,rtfd/readthedocs.org,titiushko/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,mhils/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,SteveViss/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,wijerasa/readthedocs.org,michaelmcandrew/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,mrshoki/readthedocs.org,d0ugal/readthedocs.org,VishvajitP/readthedocs.org,Carreau/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,ojii/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,tddv/readthedocs.org,LukasBoersma/readthedocs.org,sunnyzwh/readthedocs.org,Tazer/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,emawind84/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,attakei/readthedocs-oauth,cgourlay/readthedocs.org,laplaceliu/readthedocs.org,atsuyim/readthedocs.org,espdev/readthedocs.org,sid-kap/readthedocs.org,wanghaven/readthedocs.org,raven47git/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,asampat3090/readthedocs.org,sils1297/readthedocs.org,jerel/readthedocs.org,GovReady/readthedocs.org,atsuyim/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,singingwolfboy/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,KamranMackey/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,attakei/readthedocs-oauth,takluyver/readthedocs.org,takluyver/readthedocs.org,dirn/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,ojii/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,agjohnson/readthedocs.org,emawind84/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,sils1297/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,nyergler/pythonslides,titiushko/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,nyergler/pythonslides,hach-que/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,nikolas/readthedocs.org,asampat3090/readthedocs.org,soulshake/readthedocs.org,d0ugal/readthedocs.org,LukasBoersma/readthedoc
s.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,Carreau/readthedocs.org,nikolas/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,soulshake/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,dirn/readthedocs.org,Tazer/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,gjtorikian/readthedocs.org,LukasBoersma/readthedocs.org,kenwang76/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,mrshoki/readthedocs.org,mrshoki/readthedocs.org,atsuyim/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,gjtorikian/readthedocs.org,mhils/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,KamranMackey/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,nyergler/pythonslides,wanghaven/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,KamranMackey/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,LukasBoersma/readthedocs.org,asampat3090/readthedocs.org,stevepiercy/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,sunnyzwh/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,clarkperkins/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,kdkeyser/readthedocs.org,wijerasa/readthedocs.org
python
## Code Before: from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns ## Instruction: Add verison_slug redirection back in for now. ## Code After: from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^(?P<version_slug>.*)/$', 'core.views.subdomain_handler', name='version_subdomain_handler' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns
... {'filename': 'index.html'}, name='docs_detail' ), url(r'^(?P<version_slug>.*)/$', 'core.views.subdomain_handler', name='version_subdomain_handler' ), url(r'^$', 'core.views.subdomain_handler'), ) ...
3ebf82c7ef356de3c4d427cea3723737661522e8
pinax/waitinglist/management/commands/mail_out_survey_links.py
pinax/waitinglist/management/commands/mail_out_survey_links.py
from django.conf import settings from django.core.mail import EmailMessage from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.contrib.sites.models import Site from ...models import WaitingListEntry, Survey class Command(BaseCommand): help = "Email links to survey instances for those that never saw a survey" def handle(self, *args, **options): survey = Survey.objects.get(active=True) entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True) for entry in entries: instance = survey.instances.create(entry=entry) site = Site.objects.get_current() protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http") ctx = { "instance": instance, "site": site, "protocol": protocol, } subject = render_to_string("waitinglist/survey_invite_subject.txt", ctx) subject = subject.strip() message = render_to_string("waitinglist/survey_invite_body.txt", ctx) EmailMessage( subject, message, to=[entry.email], from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL ).send()
from django.conf import settings from django.core.mail import EmailMessage from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.contrib.sites.models import Site from ...models import WaitingListEntry, Survey class Command(BaseCommand): help = "Email links to survey instances for those that never saw a survey" def handle(self, *args, **options): survey = Survey.objects.get(active=True) entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True) for entry in entries: instance = survey.instances.create(entry=entry) site = Site.objects.get_current() protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http") ctx = { "instance": instance, "site": site, "protocol": protocol, } subject = render_to_string("pinax/waitinglist/survey_invite_subject.txt", ctx) subject = subject.strip() message = render_to_string("pinax/waitinglist/survey_invite_body.txt", ctx) EmailMessage( subject, message, to=[entry.email], from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL ).send()
Fix paths in mail out email management command
Fix paths in mail out email management command
Python
mit
pinax/pinax-waitinglist,pinax/pinax-waitinglist
python
## Code Before: from django.conf import settings from django.core.mail import EmailMessage from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.contrib.sites.models import Site from ...models import WaitingListEntry, Survey class Command(BaseCommand): help = "Email links to survey instances for those that never saw a survey" def handle(self, *args, **options): survey = Survey.objects.get(active=True) entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True) for entry in entries: instance = survey.instances.create(entry=entry) site = Site.objects.get_current() protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http") ctx = { "instance": instance, "site": site, "protocol": protocol, } subject = render_to_string("waitinglist/survey_invite_subject.txt", ctx) subject = subject.strip() message = render_to_string("waitinglist/survey_invite_body.txt", ctx) EmailMessage( subject, message, to=[entry.email], from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL ).send() ## Instruction: Fix paths in mail out email management command ## Code After: from django.conf import settings from django.core.mail import EmailMessage from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.contrib.sites.models import Site from ...models import WaitingListEntry, Survey class Command(BaseCommand): help = "Email links to survey instances for those that never saw a survey" def handle(self, *args, **options): survey = Survey.objects.get(active=True) entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True) for entry in entries: instance = survey.instances.create(entry=entry) site = Site.objects.get_current() protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http") ctx = { "instance": instance, "site": site, "protocol": protocol, } subject = render_to_string("pinax/waitinglist/survey_invite_subject.txt", ctx) subject = subject.strip() message = render_to_string("pinax/waitinglist/survey_invite_body.txt", ctx) EmailMessage( subject, message, to=[entry.email], from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL ).send()
// ... existing code ... "site": site, "protocol": protocol, } subject = render_to_string("pinax/waitinglist/survey_invite_subject.txt", ctx) subject = subject.strip() message = render_to_string("pinax/waitinglist/survey_invite_body.txt", ctx) EmailMessage( subject, message, // ... rest of the code ...
c3aad1ab31b84cce97555c56ec44986addca5ee8
create_schemas_and_tables.py
create_schemas_and_tables.py
import os import subprocess ODIE_DIR = os.path.dirname(__file__) subprocess.call([os.path.join(ODIE_DIR, 'create_garfield_models.py')]) subprocess.call([os.path.join(ODIE_DIR, 'create_fsmi_models.py')])
import os import subprocess ODIE_DIR = os.path.dirname(__file__) # due to Flask-SQLA only using a single MetaData object even when handling multiple # databases, we can't create let it create all our models at once (otherwise it # tries to create Enums in all databases, which will fail due to missing schemas) # We therefor create them db by db, only letting Flask-SQLA know about one db at a time. # Hence subprocesses instead of simple imports subprocess.call([os.path.join(ODIE_DIR, 'create_garfield_models.py')]) subprocess.call([os.path.join(ODIE_DIR, 'create_fsmi_models.py')])
Add comment explaining unconventional model creation
Add comment explaining unconventional model creation
Python
mit
fjalir/odie-server,Kha/odie-server,Kha/odie-server,fjalir/odie-server,fjalir/odie-server,Kha/odie-server,fsmi/odie-server,fsmi/odie-server,fsmi/odie-server
python
## Code Before: import os import subprocess ODIE_DIR = os.path.dirname(__file__) subprocess.call([os.path.join(ODIE_DIR, 'create_garfield_models.py')]) subprocess.call([os.path.join(ODIE_DIR, 'create_fsmi_models.py')]) ## Instruction: Add comment explaining unconventional model creation ## Code After: import os import subprocess ODIE_DIR = os.path.dirname(__file__) # due to Flask-SQLA only using a single MetaData object even when handling multiple # databases, we can't create let it create all our models at once (otherwise it # tries to create Enums in all databases, which will fail due to missing schemas) # We therefor create them db by db, only letting Flask-SQLA know about one db at a time. # Hence subprocesses instead of simple imports subprocess.call([os.path.join(ODIE_DIR, 'create_garfield_models.py')]) subprocess.call([os.path.join(ODIE_DIR, 'create_fsmi_models.py')])
... ODIE_DIR = os.path.dirname(__file__) # due to Flask-SQLA only using a single MetaData object even when handling multiple # databases, we can't create let it create all our models at once (otherwise it # tries to create Enums in all databases, which will fail due to missing schemas) # We therefor create them db by db, only letting Flask-SQLA know about one db at a time. # Hence subprocesses instead of simple imports subprocess.call([os.path.join(ODIE_DIR, 'create_garfield_models.py')]) subprocess.call([os.path.join(ODIE_DIR, 'create_fsmi_models.py')]) ...
2b612556339a343a7b5b5853117e4aa1cdeb710b
app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/data/TrendingRepository.kt
app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/data/TrendingRepository.kt
package com.github.ramonrabello.kiphy.trends.data import io.reactivex.Single class TrendingRepository( private val localDataSource: TrendingDataSource, private val remoteDataSource: TrendingDataSource) { fun loadTrending() = Single.merge( remoteDataSource.loadTrending(), localDataSource.loadTrending()) }
package com.github.ramonrabello.kiphy.trends.data import com.github.ramonrabello.kiphy.trends.data.source.TrendingDataSource import io.reactivex.Single class TrendingRepository( private val localDataSource: TrendingDataSource, private val remoteDataSource: TrendingDataSource) { fun loadTrending() = Single.merge( remoteDataSource.loadTrending(), localDataSource.loadTrending()) }
Update constructor to use both local and remote data source.
Update constructor to use both local and remote data source.
Kotlin
apache-2.0
ramonrabello/Kiphy
kotlin
## Code Before: package com.github.ramonrabello.kiphy.trends.data import io.reactivex.Single class TrendingRepository( private val localDataSource: TrendingDataSource, private val remoteDataSource: TrendingDataSource) { fun loadTrending() = Single.merge( remoteDataSource.loadTrending(), localDataSource.loadTrending()) } ## Instruction: Update constructor to use both local and remote data source. ## Code After: package com.github.ramonrabello.kiphy.trends.data import com.github.ramonrabello.kiphy.trends.data.source.TrendingDataSource import io.reactivex.Single class TrendingRepository( private val localDataSource: TrendingDataSource, private val remoteDataSource: TrendingDataSource) { fun loadTrending() = Single.merge( remoteDataSource.loadTrending(), localDataSource.loadTrending()) }
... package com.github.ramonrabello.kiphy.trends.data import com.github.ramonrabello.kiphy.trends.data.source.TrendingDataSource import io.reactivex.Single class TrendingRepository( ...
83b21d33fc2bce360b0860a0cb34ba48a2f8f8df
KnightsOfAlentejoAndroid-AS/app/src/main/java/br/odb/menu/ShowOutcomeActivity.java
KnightsOfAlentejoAndroid-AS/app/src/main/java/br/odb/menu/ShowOutcomeActivity.java
package br.odb.menu; import android.app.Activity; import android.graphics.Typeface; import android.os.Bundle; import android.widget.TextView; import br.odb.knights.R; public class ShowOutcomeActivity extends Activity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.outcome_layout); boolean outcomeIsGood = KnightsOfAlentejoSplashActivity.GameOutcome.valueOf( getIntent().getStringExtra( KnightsOfAlentejoSplashActivity.MAPKEY_SUCCESSFUL_LEVEL_OUTCOME) ) == KnightsOfAlentejoSplashActivity.GameOutcome.VICTORY; ((TextView) findViewById(R.id.tvOutcome)).setText( getString( outcomeIsGood ? R.string.outcome_good : R.string.outcome_bad ) ); ((TextView) findViewById(R.id.tvOutcome)).setTextColor(outcomeIsGood ? 0xFF00FF00 : 0xFFFF0000); Typeface font = Typeface.createFromAsset(getAssets(), "fonts/MedievalSharp.ttf"); ( (TextView)findViewById(R.id.tvOutcome) ).setTypeface( font ); } }
package br.odb.menu; import android.app.Activity; import android.graphics.Typeface; import android.os.Bundle; import android.widget.TextView; import br.odb.knights.R; public class ShowOutcomeActivity extends Activity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.outcome_layout); boolean outcomeIsGood = KnightsOfAlentejoSplashActivity.GameOutcome.valueOf( getIntent().getStringExtra( KnightsOfAlentejoSplashActivity.MAPKEY_SUCCESSFUL_LEVEL_OUTCOME) ) == KnightsOfAlentejoSplashActivity.GameOutcome.VICTORY; String text = getString( outcomeIsGood ? R.string.outcome_good : R.string.outcome_bad ); setTitle( text ); ((TextView) findViewById(R.id.tvOutcome)).setText( text ); ((TextView) findViewById(R.id.tvOutcome)).setTextColor(outcomeIsGood ? 0xFF00FF00 : 0xFFFF0000); Typeface font = Typeface.createFromAsset(getAssets(), "fonts/MedievalSharp.ttf"); ( (TextView)findViewById(R.id.tvOutcome) ).setTypeface( font ); } }
Use outcome for activity title in said activity
Use outcome for activity title in said activity
Java
bsd-2-clause
TheFakeMontyOnTheRun/knightsofalentejo,TheFakeMontyOnTheRun/knightsofalentejo,TheFakeMontyOnTheRun/knightsofalentejo
java
## Code Before: package br.odb.menu; import android.app.Activity; import android.graphics.Typeface; import android.os.Bundle; import android.widget.TextView; import br.odb.knights.R; public class ShowOutcomeActivity extends Activity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.outcome_layout); boolean outcomeIsGood = KnightsOfAlentejoSplashActivity.GameOutcome.valueOf( getIntent().getStringExtra( KnightsOfAlentejoSplashActivity.MAPKEY_SUCCESSFUL_LEVEL_OUTCOME) ) == KnightsOfAlentejoSplashActivity.GameOutcome.VICTORY; ((TextView) findViewById(R.id.tvOutcome)).setText( getString( outcomeIsGood ? R.string.outcome_good : R.string.outcome_bad ) ); ((TextView) findViewById(R.id.tvOutcome)).setTextColor(outcomeIsGood ? 0xFF00FF00 : 0xFFFF0000); Typeface font = Typeface.createFromAsset(getAssets(), "fonts/MedievalSharp.ttf"); ( (TextView)findViewById(R.id.tvOutcome) ).setTypeface( font ); } } ## Instruction: Use outcome for activity title in said activity ## Code After: package br.odb.menu; import android.app.Activity; import android.graphics.Typeface; import android.os.Bundle; import android.widget.TextView; import br.odb.knights.R; public class ShowOutcomeActivity extends Activity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.outcome_layout); boolean outcomeIsGood = KnightsOfAlentejoSplashActivity.GameOutcome.valueOf( getIntent().getStringExtra( KnightsOfAlentejoSplashActivity.MAPKEY_SUCCESSFUL_LEVEL_OUTCOME) ) == KnightsOfAlentejoSplashActivity.GameOutcome.VICTORY; String text = getString( outcomeIsGood ? R.string.outcome_good : R.string.outcome_bad ); setTitle( text ); ((TextView) findViewById(R.id.tvOutcome)).setText( text ); ((TextView) findViewById(R.id.tvOutcome)).setTextColor(outcomeIsGood ? 0xFF00FF00 : 0xFFFF0000); Typeface font = Typeface.createFromAsset(getAssets(), "fonts/MedievalSharp.ttf"); ( (TextView)findViewById(R.id.tvOutcome) ).setTypeface( font ); } }
... setContentView(R.layout.outcome_layout); boolean outcomeIsGood = KnightsOfAlentejoSplashActivity.GameOutcome.valueOf( getIntent().getStringExtra( KnightsOfAlentejoSplashActivity.MAPKEY_SUCCESSFUL_LEVEL_OUTCOME) ) == KnightsOfAlentejoSplashActivity.GameOutcome.VICTORY; String text = getString( outcomeIsGood ? R.string.outcome_good : R.string.outcome_bad ); setTitle( text ); ((TextView) findViewById(R.id.tvOutcome)).setText( text ); ((TextView) findViewById(R.id.tvOutcome)).setTextColor(outcomeIsGood ? 0xFF00FF00 : 0xFFFF0000); Typeface font = Typeface.createFromAsset(getAssets(), "fonts/MedievalSharp.ttf"); ( (TextView)findViewById(R.id.tvOutcome) ).setTypeface( font ); } } ...
3f4ef89512da6a3e89f27121446ec59773869017
setuptools/tests/test_setuptools.py
setuptools/tests/test_setuptools.py
import os import pytest import setuptools @pytest.fixture def example_source(tmpdir): tmpdir.mkdir('foo') (tmpdir / 'foo/bar.py').write('') (tmpdir / 'readme.txt').write('') return tmpdir def test_findall(example_source): found = list(setuptools.findall(str(example_source))) expected = ['readme.txt', 'foo/bar.py'] expected = [example_source.join(fn) for fn in expected] assert found == expected def test_findall_curdir(example_source): with example_source.as_cwd(): found = list(setuptools.findall()) expected = ['readme.txt', 'foo/bar.py'] assert found == expected @pytest.fixture def can_symlink(tmpdir): """ Skip if cannot create a symbolic link """ link_fn = 'link' target_fn = 'target' try: os.symlink(target_fn, link_fn) except (OSError, NotImplementedError, AttributeError): pytest.skip("Cannot create symbolic links") os.remove(link_fn) def test_findall_missing_symlink(tmpdir, can_symlink): with tmpdir.as_cwd(): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == []
import os import pytest import setuptools @pytest.fixture def example_source(tmpdir): tmpdir.mkdir('foo') (tmpdir / 'foo/bar.py').write('') (tmpdir / 'readme.txt').write('') return tmpdir def test_findall(example_source): found = list(setuptools.findall(str(example_source))) expected = ['readme.txt', 'foo/bar.py'] expected = [example_source.join(fn) for fn in expected] assert found == expected def test_findall_curdir(example_source): with example_source.as_cwd(): found = list(setuptools.findall()) expected = ['readme.txt', os.path.join('foo', 'bar.py')] assert found == expected @pytest.fixture def can_symlink(tmpdir): """ Skip if cannot create a symbolic link """ link_fn = 'link' target_fn = 'target' try: os.symlink(target_fn, link_fn) except (OSError, NotImplementedError, AttributeError): pytest.skip("Cannot create symbolic links") os.remove(link_fn) def test_findall_missing_symlink(tmpdir, can_symlink): with tmpdir.as_cwd(): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == []
Fix failing test on Windows due to path separator
Fix failing test on Windows due to path separator
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
python
## Code Before: import os import pytest import setuptools @pytest.fixture def example_source(tmpdir): tmpdir.mkdir('foo') (tmpdir / 'foo/bar.py').write('') (tmpdir / 'readme.txt').write('') return tmpdir def test_findall(example_source): found = list(setuptools.findall(str(example_source))) expected = ['readme.txt', 'foo/bar.py'] expected = [example_source.join(fn) for fn in expected] assert found == expected def test_findall_curdir(example_source): with example_source.as_cwd(): found = list(setuptools.findall()) expected = ['readme.txt', 'foo/bar.py'] assert found == expected @pytest.fixture def can_symlink(tmpdir): """ Skip if cannot create a symbolic link """ link_fn = 'link' target_fn = 'target' try: os.symlink(target_fn, link_fn) except (OSError, NotImplementedError, AttributeError): pytest.skip("Cannot create symbolic links") os.remove(link_fn) def test_findall_missing_symlink(tmpdir, can_symlink): with tmpdir.as_cwd(): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == [] ## Instruction: Fix failing test on Windows due to path separator ## Code After: import os import pytest import setuptools @pytest.fixture def example_source(tmpdir): tmpdir.mkdir('foo') (tmpdir / 'foo/bar.py').write('') (tmpdir / 'readme.txt').write('') return tmpdir def test_findall(example_source): found = list(setuptools.findall(str(example_source))) expected = ['readme.txt', 'foo/bar.py'] expected = [example_source.join(fn) for fn in expected] assert found == expected def test_findall_curdir(example_source): with example_source.as_cwd(): found = list(setuptools.findall()) expected = ['readme.txt', os.path.join('foo', 'bar.py')] assert found == expected @pytest.fixture def can_symlink(tmpdir): """ Skip if cannot create a symbolic link """ link_fn = 'link' target_fn = 'target' try: os.symlink(target_fn, link_fn) except (OSError, NotImplementedError, AttributeError): pytest.skip("Cannot create symbolic links") os.remove(link_fn) def test_findall_missing_symlink(tmpdir, can_symlink): with tmpdir.as_cwd(): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == []
# ... existing code ... def test_findall_curdir(example_source): with example_source.as_cwd(): found = list(setuptools.findall()) expected = ['readme.txt', os.path.join('foo', 'bar.py')] assert found == expected # ... rest of the code ...
93e2ff0dd32a72efa90222988d4289c70bb55b98
c2corg_api/models/common/fields_book.py
c2corg_api/models/common/fields_book.py
DEFAULT_FIELDS = [ 'locales.title', 'locales.summary', 'locales.description', 'locales.lang', 'author', 'editor', 'activities', 'url', 'isbn', 'book_types', 'publication_date', 'langs', 'nb_pages' ] DEFAULT_REQUIRED = [ 'locales', 'locales.title', 'book_types' ] LISTING_FIELDS = [ 'locales', 'locales.title', 'activities', 'author', 'quality', 'book_types' ] fields_book = { 'fields': DEFAULT_FIELDS, 'required': DEFAULT_REQUIRED, 'listing': LISTING_FIELDS }
DEFAULT_FIELDS = [ 'locales.title', 'locales.summary', 'locales.description', 'locales.lang', 'author', 'editor', 'activities', 'url', 'isbn', 'book_types', 'publication_date', 'langs', 'nb_pages' ] DEFAULT_REQUIRED = [ 'locales', 'locales.title', 'book_types' ] LISTING_FIELDS = [ 'locales', 'locales.title', 'locales.summary', 'activities', 'author', 'quality', 'book_types' ] fields_book = { 'fields': DEFAULT_FIELDS, 'required': DEFAULT_REQUIRED, 'listing': LISTING_FIELDS }
Add summary to book listing
Add summary to book listing
Python
agpl-3.0
c2corg/v6_api,c2corg/v6_api,c2corg/v6_api
python
## Code Before: DEFAULT_FIELDS = [ 'locales.title', 'locales.summary', 'locales.description', 'locales.lang', 'author', 'editor', 'activities', 'url', 'isbn', 'book_types', 'publication_date', 'langs', 'nb_pages' ] DEFAULT_REQUIRED = [ 'locales', 'locales.title', 'book_types' ] LISTING_FIELDS = [ 'locales', 'locales.title', 'activities', 'author', 'quality', 'book_types' ] fields_book = { 'fields': DEFAULT_FIELDS, 'required': DEFAULT_REQUIRED, 'listing': LISTING_FIELDS } ## Instruction: Add summary to book listing ## Code After: DEFAULT_FIELDS = [ 'locales.title', 'locales.summary', 'locales.description', 'locales.lang', 'author', 'editor', 'activities', 'url', 'isbn', 'book_types', 'publication_date', 'langs', 'nb_pages' ] DEFAULT_REQUIRED = [ 'locales', 'locales.title', 'book_types' ] LISTING_FIELDS = [ 'locales', 'locales.title', 'locales.summary', 'activities', 'author', 'quality', 'book_types' ] fields_book = { 'fields': DEFAULT_FIELDS, 'required': DEFAULT_REQUIRED, 'listing': LISTING_FIELDS }
// ... existing code ... LISTING_FIELDS = [ 'locales', 'locales.title', 'locales.summary', 'activities', 'author', 'quality', // ... rest of the code ...
50224b985a2215b8598f274efd33fc5c20054417
tests/test_str.py
tests/test_str.py
import pytest from hypothesis import given from hypothesis.strategies import lists, text from datatyping.datatyping import validate @given(ss=lists(text())) def test_simple(ss): assert validate([str], ss) is None @given(s=text()) def test_simple_error(s): with pytest.raises(TypeError): validate([str], s) @given(ss=lists(lists(text()))) def test_nested(ss): assert validate([[str]], ss) is None
import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string)
Rewrite str tests with hypothesis Remove lists from testing
Rewrite str tests with hypothesis Remove lists from testing
Python
mit
Zaab1t/datatyping
python
## Code Before: import pytest from hypothesis import given from hypothesis.strategies import lists, text from datatyping.datatyping import validate @given(ss=lists(text())) def test_simple(ss): assert validate([str], ss) is None @given(s=text()) def test_simple_error(s): with pytest.raises(TypeError): validate([str], s) @given(ss=lists(lists(text()))) def test_nested(ss): assert validate([[str]], ss) is None ## Instruction: Rewrite str tests with hypothesis Remove lists from testing ## Code After: import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string)
# ... existing code ... import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string) # ... rest of the code ...
4e6ec9cc5b052341094723433f58a21020fa82f0
tools/scheduler/scheduler/core.py
tools/scheduler/scheduler/core.py
class Job: def __init__(self, roles): self.roles = roles self.tasks = None self.status = None class Role: def __init__(self, peers = 0, variables {}, inputs {}, hostmask = r"*"): self.peers = peers self.variables = variables, self.inputs = inputs self.hostmask = hostmask class Task: def __init__(self): self.status = None
class Job: def __init__(self, roles, binary_url): self.roles = roles self.binary_url = binary_url self.tasks = None self.status = None class Role: def __init__(self, peers = 0, variables {}, inputs {}, hostmask = r"*"): self.peers = peers self.variables = variables, self.inputs = inputs self.hostmask = hostmask class Task: def __init__(self): self.status = None
Add binary_url member to Job.
Add binary_url member to Job.
Python
apache-2.0
DaMSL/K3,DaMSL/K3,yliu120/K3
python
## Code Before: class Job: def __init__(self, roles): self.roles = roles self.tasks = None self.status = None class Role: def __init__(self, peers = 0, variables {}, inputs {}, hostmask = r"*"): self.peers = peers self.variables = variables, self.inputs = inputs self.hostmask = hostmask class Task: def __init__(self): self.status = None ## Instruction: Add binary_url member to Job. ## Code After: class Job: def __init__(self, roles, binary_url): self.roles = roles self.binary_url = binary_url self.tasks = None self.status = None class Role: def __init__(self, peers = 0, variables {}, inputs {}, hostmask = r"*"): self.peers = peers self.variables = variables, self.inputs = inputs self.hostmask = hostmask class Task: def __init__(self): self.status = None
// ... existing code ... class Job: def __init__(self, roles, binary_url): self.roles = roles self.binary_url = binary_url self.tasks = None self.status = None // ... rest of the code ...
ce8dc3daa6a4af3c5ed743fb2b5c4470bff7647b
test_knot.py
test_knot.py
import unittest import knot class TestContainer(unittest.TestCase): def test_wrapper_looks_like_service(self): c = knot.Container() @c.service('service') def service(container): """Docstring.""" pass self.assertEqual(c['service'].__name__, 'service') self.assertEqual(c['service'].__doc__, 'Docstring.') def test_returns_if_value(self): c = knot.Container({'value': 'foobar'}) self.assertEqual(c('value'), 'foobar') def test_calls_if_service(self): c = knot.Container() @c.service('service') def service(container): return 'foobar' self.assertEqual(c('service'), 'foobar') def test_shares_service(self): c = knot.Container() @c.service('service', True) def service(container): return {} dict1 = c('service') dict2 = c('service') assert isinstance(dict1, dict) assert isinstance(dict2, dict) assert dict1 is dict2 if __name__ == '__main__': unittest.main()
import unittest import knot class TestContainer(unittest.TestCase): def test_wrapper_looks_like_service(self): c = knot.Container() @c.service('service') def service(container): """Docstring.""" pass self.assertEqual(c['service'].__name__, 'service') self.assertEqual(c['service'].__doc__, 'Docstring.') def test_returns_if_value(self): c = knot.Container({'value': 'foobar'}) self.assertEqual(c('value'), 'foobar') def test_calls_if_service(self): c = knot.Container() @c.service('service') def service(container): return 'foobar' self.assertEqual(c('service'), 'foobar') def test_returns_default_with_unknown_key(self): c = knot.Container() self.assertEqual(c('service', 'foobar'), 'foobar') self.assertEqual(c('service', lambda c: 'foobar'), 'foobar') def test_shares_service(self): c = knot.Container() @c.service('service', True) def service(container): return {} dict1 = c('service') dict2 = c('service') assert isinstance(dict1, dict) assert isinstance(dict2, dict) assert dict1 is dict2 if __name__ == '__main__': unittest.main()
Add test for default values.
Add test for default values.
Python
mit
jaapverloop/knot
python
## Code Before: import unittest import knot class TestContainer(unittest.TestCase): def test_wrapper_looks_like_service(self): c = knot.Container() @c.service('service') def service(container): """Docstring.""" pass self.assertEqual(c['service'].__name__, 'service') self.assertEqual(c['service'].__doc__, 'Docstring.') def test_returns_if_value(self): c = knot.Container({'value': 'foobar'}) self.assertEqual(c('value'), 'foobar') def test_calls_if_service(self): c = knot.Container() @c.service('service') def service(container): return 'foobar' self.assertEqual(c('service'), 'foobar') def test_shares_service(self): c = knot.Container() @c.service('service', True) def service(container): return {} dict1 = c('service') dict2 = c('service') assert isinstance(dict1, dict) assert isinstance(dict2, dict) assert dict1 is dict2 if __name__ == '__main__': unittest.main() ## Instruction: Add test for default values. ## Code After: import unittest import knot class TestContainer(unittest.TestCase): def test_wrapper_looks_like_service(self): c = knot.Container() @c.service('service') def service(container): """Docstring.""" pass self.assertEqual(c['service'].__name__, 'service') self.assertEqual(c['service'].__doc__, 'Docstring.') def test_returns_if_value(self): c = knot.Container({'value': 'foobar'}) self.assertEqual(c('value'), 'foobar') def test_calls_if_service(self): c = knot.Container() @c.service('service') def service(container): return 'foobar' self.assertEqual(c('service'), 'foobar') def test_returns_default_with_unknown_key(self): c = knot.Container() self.assertEqual(c('service', 'foobar'), 'foobar') self.assertEqual(c('service', lambda c: 'foobar'), 'foobar') def test_shares_service(self): c = knot.Container() @c.service('service', True) def service(container): return {} dict1 = c('service') dict2 = c('service') assert isinstance(dict1, dict) assert isinstance(dict2, dict) assert dict1 is dict2 if __name__ == '__main__': unittest.main()
# ... existing code ... self.assertEqual(c('service'), 'foobar') def test_returns_default_with_unknown_key(self): c = knot.Container() self.assertEqual(c('service', 'foobar'), 'foobar') self.assertEqual(c('service', lambda c: 'foobar'), 'foobar') def test_shares_service(self): c = knot.Container() # ... rest of the code ...
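
As a quick aside on the knot record above: the new test exercises lookups with a default value. The sketch below shows the same behaviour outside the test suite, relying only on the API already visible in the record (the callable container, the service decorator with a share flag, and (key, default) lookups). The db_url/cache names and values are invented for illustration, not taken from the knot project.

import knot

c = knot.Container({'db_url': 'sqlite:///example.db'})

@c.service('cache', True)  # second argument shares the service, as in the record's test
def cache(container):
    return {}

print(c('db_url'))                            # 'sqlite:///example.db'
print(c('missing', 'fallback'))               # unknown key -> plain default is returned
print(c('missing', lambda cont: 'computed'))  # unknown key -> callable default is evaluated
c('cache')['hits'] = 1
assert c('cache')['hits'] == 1                # shared service returns the same dict each call
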
a452adfb297ff40ec3db71108681829769b1fba4
pyface/tasks/enaml_editor.py
pyface/tasks/enaml_editor.py
from traits.api import Instance, on_trait_change from enaml.components.constraints_widget import ConstraintsWidget # local imports from pyface.tasks.editor import Editor class EnamlEditor(Editor): """ Create an Editor for Enaml Components. """ #### EnamlEditor interface ############################################## component = Instance(ConstraintsWidget) def create_component(self): raise NotImplementedError ########################################################################### # 'IEditor' interface. ########################################################################### def create(self, parent): self.component = self.create_component() self.component.setup(parent=parent) self.control = self.component.toolkit_widget self.component.on_trait_change(self.size_hint_changed, 'size_hint_updated') def destroy(self): self.control = None self.component.destroy()
from traits.api import Instance, on_trait_change from enaml.components.constraints_widget import ConstraintsWidget # local imports from pyface.tasks.editor import Editor class EnamlEditor(Editor): """ Create an Editor for Enaml Components. """ #### EnamlEditor interface ############################################## component = Instance(ConstraintsWidget) def create_component(self): raise NotImplementedError ########################################################################### # 'IEditor' interface. ########################################################################### def create(self, parent): self.component = self.create_component() self.component.setup(parent=parent) self.control = self.component.toolkit_widget def destroy(self): self.control = None self.component.destroy()
Remove call of unimplemented method.
BUG: Remove call of unimplemented method.
Python
bsd-3-clause
brett-patterson/pyface,pankajp/pyface,geggo/pyface,geggo/pyface
python
## Code Before: from traits.api import Instance, on_trait_change from enaml.components.constraints_widget import ConstraintsWidget # local imports from pyface.tasks.editor import Editor class EnamlEditor(Editor): """ Create an Editor for Enaml Components. """ #### EnamlEditor interface ############################################## component = Instance(ConstraintsWidget) def create_component(self): raise NotImplementedError ########################################################################### # 'IEditor' interface. ########################################################################### def create(self, parent): self.component = self.create_component() self.component.setup(parent=parent) self.control = self.component.toolkit_widget self.component.on_trait_change(self.size_hint_changed, 'size_hint_updated') def destroy(self): self.control = None self.component.destroy() ## Instruction: BUG: Remove call of unimplemented method. ## Code After: from traits.api import Instance, on_trait_change from enaml.components.constraints_widget import ConstraintsWidget # local imports from pyface.tasks.editor import Editor class EnamlEditor(Editor): """ Create an Editor for Enaml Components. """ #### EnamlEditor interface ############################################## component = Instance(ConstraintsWidget) def create_component(self): raise NotImplementedError ########################################################################### # 'IEditor' interface. ########################################################################### def create(self, parent): self.component = self.create_component() self.component.setup(parent=parent) self.control = self.component.toolkit_widget def destroy(self): self.control = None self.component.destroy()
... self.component = self.create_component() self.component.setup(parent=parent) self.control = self.component.toolkit_widget def destroy(self): self.control = None ...
6692476cc7523516275f4512c32b0378574210bf
django_tenants/routers.py
django_tenants/routers.py
from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_migrate(self, db, app_label, model_name=None, **hints): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from django_tenants.utils import get_public_schema_name # for INSTALLED_APPS we need a name from django.apps import apps app_name = apps.get_app_config(app_label).name if connection.schema_name == get_public_schema_name(): if app_name not in settings.SHARED_APPS: return False else: if app_name not in settings.TENANT_APPS: return False return None
from django.conf import settings from django.apps import apps as django_apps class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def app_in_list(self, app_label, apps_list): """ Is 'app_label' present in 'apps_list'? apps_list is either settings.SHARED_APPS or settings.TENANT_APPS, a list of app names. We check the presense of the app's name or the full path to the apps's AppConfig class. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications """ appconfig = django_apps.get_app_config(app_label) appconfig_full_name = '{}.{}'.format( appconfig.__module__, appconfig.__class__.__name__) return (appconfig.name in apps_list) or (appconfig_full_name in apps_list) def allow_migrate(self, db, app_label, model_name=None, **hints): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from django_tenants.utils import get_public_schema_name if connection.schema_name == get_public_schema_name(): if not self.app_in_list(app_label, settings.SHARED_APPS): return False else: if not self.app_in_list(app_label, settings.TENANT_APPS): return False return None
Fix check of an app's presence in INSTALLED_APPS
Fix check of an app's presence in INSTALLED_APPS In TenantSyncRouter, the logic to check whether an app is a tenant app or shared app was too simplistic. Django 1.7 allows two ways to add an app to INSTALLED_APPS. 1) By specifying the app's name, and 2) By specifying the dotted path to the app's AppConfig's class. This commit ensures that we check for the latter case as well. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications
Python
mit
sigma-geosistemas/django-tenants,tomturner/django-tenants,tomturner/django-tenants,tomturner/django-tenants,sigma-geosistemas/django-tenants
python
## Code Before: from django.conf import settings class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def allow_migrate(self, db, app_label, model_name=None, **hints): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from django_tenants.utils import get_public_schema_name # for INSTALLED_APPS we need a name from django.apps import apps app_name = apps.get_app_config(app_label).name if connection.schema_name == get_public_schema_name(): if app_name not in settings.SHARED_APPS: return False else: if app_name not in settings.TENANT_APPS: return False return None ## Instruction: Fix check of an app's presence in INSTALLED_APPS In TenantSyncRouter, the logic to check whether an app is a tenant app or shared app was too simplistic. Django 1.7 allows two ways to add an app to INSTALLED_APPS. 1) By specifying the app's name, and 2) By specifying the dotted path to the app's AppConfig's class. This commit ensures that we check for the latter case as well. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications ## Code After: from django.conf import settings from django.apps import apps as django_apps class TenantSyncRouter(object): """ A router to control which applications will be synced, depending if we are syncing the shared apps or the tenant apps. """ def app_in_list(self, app_label, apps_list): """ Is 'app_label' present in 'apps_list'? apps_list is either settings.SHARED_APPS or settings.TENANT_APPS, a list of app names. We check the presense of the app's name or the full path to the apps's AppConfig class. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications """ appconfig = django_apps.get_app_config(app_label) appconfig_full_name = '{}.{}'.format( appconfig.__module__, appconfig.__class__.__name__) return (appconfig.name in apps_list) or (appconfig_full_name in apps_list) def allow_migrate(self, db, app_label, model_name=None, **hints): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 from django.db import connection from django_tenants.utils import get_public_schema_name if connection.schema_name == get_public_schema_name(): if not self.app_in_list(app_label, settings.SHARED_APPS): return False else: if not self.app_in_list(app_label, settings.TENANT_APPS): return False return None
# ... existing code ... from django.conf import settings from django.apps import apps as django_apps class TenantSyncRouter(object): # ... modified code ... depending if we are syncing the shared apps or the tenant apps. """ def app_in_list(self, app_label, apps_list): """ Is 'app_label' present in 'apps_list'? apps_list is either settings.SHARED_APPS or settings.TENANT_APPS, a list of app names. We check the presense of the app's name or the full path to the apps's AppConfig class. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications """ appconfig = django_apps.get_app_config(app_label) appconfig_full_name = '{}.{}'.format( appconfig.__module__, appconfig.__class__.__name__) return (appconfig.name in apps_list) or (appconfig_full_name in apps_list) def allow_migrate(self, db, app_label, model_name=None, **hints): # the imports below need to be done here else django <1.5 goes crazy # https://code.djangoproject.com/ticket/20704 ... from django.db import connection from django_tenants.utils import get_public_schema_name if connection.schema_name == get_public_schema_name(): if not self.app_in_list(app_label, settings.SHARED_APPS): return False else: if not self.app_in_list(app_label, settings.TENANT_APPS): return False return None # ... rest of the code ...
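
The commit message in the django-tenants record above turns on the fact that, since Django 1.7, an entry in INSTALLED_APPS (and hence in SHARED_APPS or TENANT_APPS) may be either a plain app name or the dotted path to its AppConfig subclass. Below is a rough, Django-free sketch of that membership check on plain strings; the blog/shop/crm app names and config classes are made up for illustration, and this is not the package's actual code.

def app_in_list(app_name, config_module, config_class, apps_list):
    # True if the app appears in apps_list by name or by the dotted
    # path to its AppConfig class (the two forms Django accepts).
    full_config_path = '{}.{}'.format(config_module, config_class)
    return app_name in apps_list or full_config_path in apps_list

TENANT_APPS = [
    'blog',                   # form 1: plain app name
    'shop.apps.ShopConfig',   # form 2: dotted path to the AppConfig class
]

assert app_in_list('blog', 'blog.apps', 'BlogConfig', TENANT_APPS)
assert app_in_list('shop', 'shop.apps', 'ShopConfig', TENANT_APPS)
assert not app_in_list('crm', 'crm.apps', 'CrmConfig', TENANT_APPS)
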
18b4d5196fe18479e4aedfaa9030e5ba443d6a85
test_input/test70.py
test_input/test70.py
'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0
'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0 def func5(need_quotes, text): 'should not produce a warning' return (need_quotes) and ('"%s"' % text) or (text)
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
Fix a problem reported by Greg Ward and pointed out by John Machin when doing: return (need_quotes) and ('"%s"' % text) or (text) The following warning was generated: Using a conditional statement with a constant value ("%s") This was because even the stack wasn't modified after a BINARY_MODULO to say the value on the stack was no longer const.
Python
bsd-3-clause
mitar/pychecker,mitar/pychecker
python
## Code Before: 'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0 ## Instruction: Fix a problem reported by Greg Ward and pointed out by John Machin when doing: return (need_quotes) and ('"%s"' % text) or (text) The following warning was generated: Using a conditional statement with a constant value ("%s") This was because even the stack wasn't modified after a BINARY_MODULO to say the value on the stack was no longer const. ## Code After: 'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0 def func5(need_quotes, text): 'should not produce a warning' return (need_quotes) and ('"%s"' % text) or (text)
// ... existing code ... print x break return 0 def func5(need_quotes, text): 'should not produce a warning' return (need_quotes) and ('"%s"' % text) or (text) // ... rest of the code ...
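
A short aside on the pychecker record above: the expression in the commit message uses the old cond-and-a-or-b conditional idiom. The snippet below is not part of pychecker; it just sets that idiom next to the modern conditional expression, and the comments note why the idiom happens to be safe in this particular case.

def quote_old(need_quotes, text):
    # The idiom from the commit message. It is safe here because
    # '"%s"' % text is always a non-empty (truthy) string.
    return (need_quotes) and ('"%s"' % text) or (text)

def quote_new(need_quotes, text):
    # Equivalent conditional expression, which avoids the falsy-middle-value
    # pitfall of the and/or idiom entirely.
    return ('"%s"' % text) if need_quotes else text

assert quote_old(True, 'abc') == quote_new(True, 'abc') == '"abc"'
assert quote_old(False, 'abc') == quote_new(False, 'abc') == 'abc'
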
4ef9aeee68051cdc10f063caede5aa2d9ad0176c
gerrit-docker/src/main/java/io/fabric8/app/gerrit/GerritModelProcessor.java
gerrit-docker/src/main/java/io/fabric8/app/gerrit/GerritModelProcessor.java
package io.fabric8.app.gerrit; import io.fabric8.kubernetes.generator.annotation.KubernetesModelProcessor; import io.fabric8.openshift.api.model.template.TemplateBuilder; @KubernetesModelProcessor public class GerritModelProcessor { public void onList(TemplateBuilder builder) { builder.addNewServiceObject() .withNewMetadata() .withName("gerrit-http-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() /* .withProtocol("HTTP")*/ .withPort(80) .withNewTargetPort(8080) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() // Second service .addNewServiceObject() .withNewMetadata() .withName("gerrit-ssh-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() /* .withProtocol("SSH")*/ .withPort(29418) .withNewTargetPort(29418) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() .build(); } }
package io.fabric8.app.gerrit; import io.fabric8.kubernetes.generator.annotation.KubernetesModelProcessor; import io.fabric8.openshift.api.model.template.TemplateBuilder; @KubernetesModelProcessor public class GerritModelProcessor { public void onList(TemplateBuilder builder) { builder.addNewServiceObject() .withNewMetadata() .withName("gerrit-http-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(80) .withNewTargetPort(8080) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() // Second service .addNewServiceObject() .withNewMetadata() .withName("gerrit-ssh-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(29418) .withNewTargetPort(29418) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() .build(); } }
Fix issue with error spec[0].protocol. The protocol to be used is TCP
Fix issue with error spec[0].protocol. The protocol to be used is TCP
Java
apache-2.0
finiteloopme/cd-jboss-fuse,finiteloopme/cd-jboss-fuse,finiteloopme/cd-jboss-fuse
java
## Code Before: package io.fabric8.app.gerrit; import io.fabric8.kubernetes.generator.annotation.KubernetesModelProcessor; import io.fabric8.openshift.api.model.template.TemplateBuilder; @KubernetesModelProcessor public class GerritModelProcessor { public void onList(TemplateBuilder builder) { builder.addNewServiceObject() .withNewMetadata() .withName("gerrit-http-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() /* .withProtocol("HTTP")*/ .withPort(80) .withNewTargetPort(8080) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() // Second service .addNewServiceObject() .withNewMetadata() .withName("gerrit-ssh-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() /* .withProtocol("SSH")*/ .withPort(29418) .withNewTargetPort(29418) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() .build(); } } ## Instruction: Fix issue with error spec[0].protocol. The protocol to be used is TCP ## Code After: package io.fabric8.app.gerrit; import io.fabric8.kubernetes.generator.annotation.KubernetesModelProcessor; import io.fabric8.openshift.api.model.template.TemplateBuilder; @KubernetesModelProcessor public class GerritModelProcessor { public void onList(TemplateBuilder builder) { builder.addNewServiceObject() .withNewMetadata() .withName("gerrit-http-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(80) .withNewTargetPort(8080) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() // Second service .addNewServiceObject() .withNewMetadata() .withName("gerrit-ssh-service") .addToLabels("component", "gerrit") .addToLabels("provider", "fabric8") .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(29418) .withNewTargetPort(29418) .endPort() .addToSelector("component", "gerrit") .addToSelector("provider", "fabric8") .endSpec() .endServiceObject() .build(); } }
... .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(80) .withNewTargetPort(8080) .endPort() ... .endMetadata() .withNewSpec() .addNewPort() .withProtocol("TCP") .withPort(29418) .withNewTargetPort(29418) .endPort() ...
8f993412a0110085fee10331daecfb3d36973518
__init__.py
__init__.py
import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
Add reload to init for config
Add reload to init for config
Python
mit
reticulatingspline/Scores,cottongin/Scores
python
## Code Before: import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79: ## Instruction: Add reload to init for config ## Code After: import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
# ... existing code ... import config import plugin reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! # ... rest of the code ...
285eeb1c7565f8fa9fb6ba38ed843601f81cdf4e
tmc/models/document_topic.py
tmc/models/document_topic.py
from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
Order document topics by name
[IMP] Order document topics by name
Python
agpl-3.0
tmcrosario/odoo-tmc
python
## Code Before: from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id ## Instruction: [IMP] Order document topics by name ## Code After: from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
# ... existing code ... _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', # ... rest of the code ...
08a5e8a8ed006f675064ffa5f872fccd9c34cc11
nabl2.solver/src/main/java/mb/nabl2/spoofax/analysis/IResult.java
nabl2.solver/src/main/java/mb/nabl2/spoofax/analysis/IResult.java
package mb.nabl2.spoofax.analysis; import java.util.List; import java.util.Optional; import mb.nabl2.constraints.IConstraint; import mb.nabl2.solver.ISolution; import mb.nabl2.terms.ITerm; public interface IResult { boolean partial(); List<IConstraint> constraints(); ISolution solution(); Optional<ITerm> customAnalysis(); IResult withCustomAnalysis(ITerm term); }
package mb.nabl2.spoofax.analysis; import java.util.List; import java.util.Optional; import mb.nabl2.constraints.IConstraint; import mb.nabl2.solver.ISolution; import mb.nabl2.terms.ITerm; public interface IResult { boolean partial(); List<IConstraint> constraints(); ISolution solution(); IResult withSolution(ISolution solution); Optional<ITerm> customAnalysis(); IResult withCustomAnalysis(ITerm term); }
Add interface method to update solution.
Add interface method to update solution.
Java
apache-2.0
metaborg/nabl,metaborg/nabl,metaborg/nabl
java
## Code Before: package mb.nabl2.spoofax.analysis; import java.util.List; import java.util.Optional; import mb.nabl2.constraints.IConstraint; import mb.nabl2.solver.ISolution; import mb.nabl2.terms.ITerm; public interface IResult { boolean partial(); List<IConstraint> constraints(); ISolution solution(); Optional<ITerm> customAnalysis(); IResult withCustomAnalysis(ITerm term); } ## Instruction: Add interface method to update solution. ## Code After: package mb.nabl2.spoofax.analysis; import java.util.List; import java.util.Optional; import mb.nabl2.constraints.IConstraint; import mb.nabl2.solver.ISolution; import mb.nabl2.terms.ITerm; public interface IResult { boolean partial(); List<IConstraint> constraints(); ISolution solution(); IResult withSolution(ISolution solution); Optional<ITerm> customAnalysis(); IResult withCustomAnalysis(ITerm term); }
// ... existing code ... ISolution solution(); IResult withSolution(ISolution solution); Optional<ITerm> customAnalysis(); IResult withCustomAnalysis(ITerm term); // ... rest of the code ...
f079d29309ad9c46be0d6000393bf3ee40da45eb
src/org/c4k3/Events/PlayerDeath.java
src/org/c4k3/Events/PlayerDeath.java
package org.c4k3.Events; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.entity.PlayerDeathEvent; /** Class is responsible for listening to player deaths * and making sure no xp is dropped in the events */ public class PlayerDeath implements Listener { @EventHandler(priority = EventPriority.NORMAL,ignoreCancelled=true) public void onPlayerDeath(PlayerDeathEvent event) { if ( Event.isPlayerActive(event.getEntity().getUniqueId()) ) { event.setDroppedExp(0); } } }
package org.c4k3.Events; import org.bukkit.GameMode; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.entity.PlayerDeathEvent; /** Class is responsible for listening to player deaths * and making sure no xp is dropped in the events * and that players don't keep any set gamemodes */ public class PlayerDeath implements Listener { @EventHandler(priority = EventPriority.NORMAL,ignoreCancelled=true) public void onPlayerDeath(PlayerDeathEvent event) { if ( Event.isPlayerActive(event.getEntity().getUniqueId()) ) { event.setDroppedExp(0); } Player player = event.getEntity(); if ( !player.isOp() && player.getGameMode() != GameMode.SURVIVAL ) { player.setGameMode(GameMode.SURVIVAL); } } }
Fix players being able to /kill after restart to leave with gm1
Fix players being able to /kill after restart to leave with gm1
Java
cc0-1.0
C4K3/Events
java
## Code Before: package org.c4k3.Events; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.entity.PlayerDeathEvent; /** Class is responsible for listening to player deaths * and making sure no xp is dropped in the events */ public class PlayerDeath implements Listener { @EventHandler(priority = EventPriority.NORMAL,ignoreCancelled=true) public void onPlayerDeath(PlayerDeathEvent event) { if ( Event.isPlayerActive(event.getEntity().getUniqueId()) ) { event.setDroppedExp(0); } } } ## Instruction: Fix players being able to /kill after restart to leave with gm1 ## Code After: package org.c4k3.Events; import org.bukkit.GameMode; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.entity.PlayerDeathEvent; /** Class is responsible for listening to player deaths * and making sure no xp is dropped in the events * and that players don't keep any set gamemodes */ public class PlayerDeath implements Listener { @EventHandler(priority = EventPriority.NORMAL,ignoreCancelled=true) public void onPlayerDeath(PlayerDeathEvent event) { if ( Event.isPlayerActive(event.getEntity().getUniqueId()) ) { event.setDroppedExp(0); } Player player = event.getEntity(); if ( !player.isOp() && player.getGameMode() != GameMode.SURVIVAL ) { player.setGameMode(GameMode.SURVIVAL); } } }
// ... existing code ... package org.c4k3.Events; import org.bukkit.GameMode; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; // ... modified code ... /** Class is responsible for listening to player deaths * and making sure no xp is dropped in the events * and that players don't keep any set gamemodes */ public class PlayerDeath implements Listener { ... } Player player = event.getEntity(); if ( !player.isOp() && player.getGameMode() != GameMode.SURVIVAL ) { player.setGameMode(GameMode.SURVIVAL); } } } // ... rest of the code ...
50d76d87b5fb905d4d472789500f9b22be8b7ff0
src/main/java/net/openhft/chronicle/core/values/LongValue.java
src/main/java/net/openhft/chronicle/core/values/LongValue.java
/* * Copyright 2016-2020 Chronicle Software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.core.values; public interface LongValue { long getValue(); void setValue(long value); long getVolatileValue(); void setVolatileValue(long value); void setOrderedValue(long value); long addValue(long delta); long addAtomicValue(long delta); boolean compareAndSwapValue(long expected, long value); default void setMaxValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos >= value) break; if (compareAndSwapValue(pos, value)) break; } } default void setMinValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos <= value) break; if (compareAndSwapValue(pos, value)) break; } } }
/* * Copyright 2016-2020 Chronicle Software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.core.values; public interface LongValue { long getValue(); void setValue(long value); long getVolatileValue(); default long getVolatileValue(long closedValue) { return getVolatileValue(); } void setVolatileValue(long value); void setOrderedValue(long value); long addValue(long delta); long addAtomicValue(long delta); boolean compareAndSwapValue(long expected, long value); default void setMaxValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos >= value) break; if (compareAndSwapValue(pos, value)) break; } } default void setMinValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos <= value) break; if (compareAndSwapValue(pos, value)) break; } } }
Support accessing a long value after being closed for graceful shutdown
Support accessing a long value after being closed for graceful shutdown
Java
apache-2.0
OpenHFT/Chronicle-Core
java
## Code Before: /* * Copyright 2016-2020 Chronicle Software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.core.values; public interface LongValue { long getValue(); void setValue(long value); long getVolatileValue(); void setVolatileValue(long value); void setOrderedValue(long value); long addValue(long delta); long addAtomicValue(long delta); boolean compareAndSwapValue(long expected, long value); default void setMaxValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos >= value) break; if (compareAndSwapValue(pos, value)) break; } } default void setMinValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos <= value) break; if (compareAndSwapValue(pos, value)) break; } } } ## Instruction: Support accessing a long value after being closed for graceful shutdown ## Code After: /* * Copyright 2016-2020 Chronicle Software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.core.values; public interface LongValue { long getValue(); void setValue(long value); long getVolatileValue(); default long getVolatileValue(long closedValue) { return getVolatileValue(); } void setVolatileValue(long value); void setOrderedValue(long value); long addValue(long delta); long addAtomicValue(long delta); boolean compareAndSwapValue(long expected, long value); default void setMaxValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos >= value) break; if (compareAndSwapValue(pos, value)) break; } } default void setMinValue(long value) { for (; ; ) { long pos = getVolatileValue(); if (pos <= value) break; if (compareAndSwapValue(pos, value)) break; } } }
# ... existing code ... void setValue(long value); long getVolatileValue(); default long getVolatileValue(long closedValue) { return getVolatileValue(); } void setVolatileValue(long value); # ... rest of the code ...
72358efa2bf9ff45377ef8ab3478b9433c67c574
candidates/feeds.py
candidates/feeds.py
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from django.utils.feedgenerator import Atom1Feed from .models import LoggedAction class RecentChangesFeed(Feed): title = "YourNextMP recent changes" description = "Changes to YNMP candidates" link = "/feeds/changes.xml" feed_type = Atom1Feed def items(self): return LoggedAction.objects.order_by('-updated')[:50] def item_title(self, item): return "{0} - {1}".format( item.popit_person_id, item.action_type ) def item_description(self, item): description = """ {0} Updated by {1} at {2} """.format( item.source, item.ip_address, str(item.updated), ) return description def item_link(self, item): return reverse('person-view', args=[item.popit_person_id])
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from django.utils.feedgenerator import Atom1Feed from .models import LoggedAction class RecentChangesFeed(Feed): title = "YourNextMP recent changes" description = "Changes to YNMP candidates" link = "/feeds/changes.xml" feed_type = Atom1Feed def items(self): return LoggedAction.objects.order_by('-updated')[:50] def item_title(self, item): return "{0} - {1}".format( item.popit_person_id, item.action_type ) def item_description(self, item): description = """ {0} Updated at {1} """.format( item.source, str(item.updated), ) return description def item_link(self, item): return reverse('person-view', args=[item.popit_person_id])
Remove IP address from feed description
Remove IP address from feed description
Python
agpl-3.0
DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative
python
## Code Before: from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from django.utils.feedgenerator import Atom1Feed from .models import LoggedAction class RecentChangesFeed(Feed): title = "YourNextMP recent changes" description = "Changes to YNMP candidates" link = "/feeds/changes.xml" feed_type = Atom1Feed def items(self): return LoggedAction.objects.order_by('-updated')[:50] def item_title(self, item): return "{0} - {1}".format( item.popit_person_id, item.action_type ) def item_description(self, item): description = """ {0} Updated by {1} at {2} """.format( item.source, item.ip_address, str(item.updated), ) return description def item_link(self, item): return reverse('person-view', args=[item.popit_person_id]) ## Instruction: Remove IP address from feed description ## Code After: from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from django.utils.feedgenerator import Atom1Feed from .models import LoggedAction class RecentChangesFeed(Feed): title = "YourNextMP recent changes" description = "Changes to YNMP candidates" link = "/feeds/changes.xml" feed_type = Atom1Feed def items(self): return LoggedAction.objects.order_by('-updated')[:50] def item_title(self, item): return "{0} - {1}".format( item.popit_person_id, item.action_type ) def item_description(self, item): description = """ {0} Updated at {1} """.format( item.source, str(item.updated), ) return description def item_link(self, item): return reverse('person-view', args=[item.popit_person_id])
... description = """ {0} Updated at {1} """.format( item.source, str(item.updated), ) ...
e14e80d55e619876c6c0cfa700737a4e6ce24915
GreenDroid/src/greendroid/util/Time.java
GreenDroid/src/greendroid/util/Time.java
package greendroid.util; /** * Utility class containing several useful constants related to time. * * @author Cyril Mottier */ public class Time { /** * The number of milliseconds in a second. */ public static final int GD_SECOND = 1000; /** * The number of milliseconds in a minute. */ public static final int GD_MINUTE = GD_SECOND * 60; /** * The number of milliseconds in an hour. */ public static final int GD_HOUR = GD_MINUTE * 60; /** * The number of milliseconds in a day. */ public static final int GD_DAY = GD_HOUR * 24; /** * The number of milliseconds in a week. */ public static final int GD_WEEK = GD_DAY * 7; }
package greendroid.util; /** * Utility class containing several useful constants related to time. * * @author Cyril Mottier */ public class Time { /** * The number of milliseconds in a second. */ public static final long GD_SECOND = 1000; /** * The number of milliseconds in a minute. */ public static final long GD_MINUTE = GD_SECOND * 60; /** * The number of milliseconds in an hour. */ public static final long GD_HOUR = GD_MINUTE * 60; /** * The number of milliseconds in a day. */ public static final long GD_DAY = GD_HOUR * 24; /** * The number of milliseconds in a week. */ public static final long GD_WEEK = GD_DAY * 7; }
Use longs for time related constants
Use longs for time related constants
Java
apache-2.0
hejunbinlan/GreenDroid,luoxiaoshenghustedu/GreenDroid,wanglj7525/GreenDroid,LiuXuApple/GreenDroid,lzy-h2o2/GreenDroid,lzy-h2o2/GreenDroid,hejunbinlan/GreenDroid,hejunbinlan/GreenDroid,yangshangwei/GreenDroid,luoxiaoshenghustedu/GreenDroid,cyrilmottier/GreenDroid,DreamZoom/GreenDroid,yangshangwei/GreenDroid,lzy-h2o2/GreenDroid,cyrilmottier/GreenDroid,HelgePlaschke/GreenDroid,DreamZoom/GreenDroid,wanglj7525/GreenDroid,LiuXuApple/GreenDroid,yangshangwei/GreenDroid,yangshangwei/GreenDroid,LiuXuApple/GreenDroid,wanglj7525/GreenDroid,HelgePlaschke/GreenDroid,cyrilmottier/GreenDroid,wanglj7525/GreenDroid,DreamZoom/GreenDroid,luoxiaoshenghustedu/GreenDroid,jurihock/voicesmith.greendroid,luoxiaoshenghustedu/GreenDroid,hejunbinlan/GreenDroid,HelgePlaschke/GreenDroid,LiuXuApple/GreenDroid,HelgePlaschke/GreenDroid,lzy-h2o2/GreenDroid,DreamZoom/GreenDroid
java
## Code Before: package greendroid.util; /** * Utility class containing several useful constants related to time. * * @author Cyril Mottier */ public class Time { /** * The number of milliseconds in a second. */ public static final int GD_SECOND = 1000; /** * The number of milliseconds in a minute. */ public static final int GD_MINUTE = GD_SECOND * 60; /** * The number of milliseconds in an hour. */ public static final int GD_HOUR = GD_MINUTE * 60; /** * The number of milliseconds in a day. */ public static final int GD_DAY = GD_HOUR * 24; /** * The number of milliseconds in a week. */ public static final int GD_WEEK = GD_DAY * 7; } ## Instruction: Use longs for time related constants ## Code After: package greendroid.util; /** * Utility class containing several useful constants related to time. * * @author Cyril Mottier */ public class Time { /** * The number of milliseconds in a second. */ public static final long GD_SECOND = 1000; /** * The number of milliseconds in a minute. */ public static final long GD_MINUTE = GD_SECOND * 60; /** * The number of milliseconds in an hour. */ public static final long GD_HOUR = GD_MINUTE * 60; /** * The number of milliseconds in a day. */ public static final long GD_DAY = GD_HOUR * 24; /** * The number of milliseconds in a week. */ public static final long GD_WEEK = GD_DAY * 7; }
... /** * The number of milliseconds in a second. */ public static final long GD_SECOND = 1000; /** * The number of milliseconds in a minute. */ public static final long GD_MINUTE = GD_SECOND * 60; /** * The number of milliseconds in an hour. */ public static final long GD_HOUR = GD_MINUTE * 60; /** * The number of milliseconds in a day. */ public static final long GD_DAY = GD_HOUR * 24; /** * The number of milliseconds in a week. */ public static final long GD_WEEK = GD_DAY * 7; } ...
e50333baa8390ae3bedb77f1442c9d90cf6ea4b0
mint/userlisting.py
mint/userlisting.py
( USERNAME_ASC, USERNAME_DES, FULLNAME_ASC, FULLNAME_DES, CREATED_ASC, CREATED_DES, ACCESSED_ASC, ACCESSED_DES ) = range(0, 8) blurbindex = 5 blurbtrunclength = 300 sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb FROM users ORDER BY %s LIMIT %d OFFSET %d""" ordersql = { USERNAME_ASC: "username ASC", USERNAME_DES: "username DESC", FULLNAME_ASC: "fullname ASC", FULLNAME_DES: "fullname DESC", CREATED_ASC: "timeCreated ASC", CREATED_DES: "timeCreated DESC", ACCESSED_ASC: "timeAccessed ASC", ACCESSED_DES: "timeAccessed DESC" } orderhtml = { USERNAME_ASC: "Username in ascending order", USERNAME_DES: "Username in descending order", FULLNAME_ASC: "Full name in ascending order", FULLNAME_DES: "Full name in descending order", CREATED_ASC: "Oldest users", CREATED_DES: "Newest users", ACCESSED_ASC: "Least recently accessed", ACCESSED_DES: "Most recently accessed" }
( USERNAME_ASC, USERNAME_DES, FULLNAME_ASC, FULLNAME_DES, CREATED_ASC, CREATED_DES, ACCESSED_ASC, ACCESSED_DES ) = range(0, 8) blurbindex = 5 blurbtrunclength = 300 sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb FROM users WHERE active=1 ORDER BY %s LIMIT %d OFFSET %d""" ordersql = { USERNAME_ASC: "username ASC", USERNAME_DES: "username DESC", FULLNAME_ASC: "fullname ASC", FULLNAME_DES: "fullname DESC", CREATED_ASC: "timeCreated ASC", CREATED_DES: "timeCreated DESC", ACCESSED_ASC: "timeAccessed ASC", ACCESSED_DES: "timeAccessed DESC" } orderhtml = { USERNAME_ASC: "Username in ascending order", USERNAME_DES: "Username in descending order", FULLNAME_ASC: "Full name in ascending order", FULLNAME_DES: "Full name in descending order", CREATED_ASC: "Oldest users", CREATED_DES: "Newest users", ACCESSED_ASC: "Least recently accessed", ACCESSED_DES: "Most recently accessed" }
Hide yet-to-be-activated usernames from listings
Hide yet-to-be-activated usernames from listings
Python
apache-2.0
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
python
## Code Before: ( USERNAME_ASC, USERNAME_DES, FULLNAME_ASC, FULLNAME_DES, CREATED_ASC, CREATED_DES, ACCESSED_ASC, ACCESSED_DES ) = range(0, 8) blurbindex = 5 blurbtrunclength = 300 sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb FROM users ORDER BY %s LIMIT %d OFFSET %d""" ordersql = { USERNAME_ASC: "username ASC", USERNAME_DES: "username DESC", FULLNAME_ASC: "fullname ASC", FULLNAME_DES: "fullname DESC", CREATED_ASC: "timeCreated ASC", CREATED_DES: "timeCreated DESC", ACCESSED_ASC: "timeAccessed ASC", ACCESSED_DES: "timeAccessed DESC" } orderhtml = { USERNAME_ASC: "Username in ascending order", USERNAME_DES: "Username in descending order", FULLNAME_ASC: "Full name in ascending order", FULLNAME_DES: "Full name in descending order", CREATED_ASC: "Oldest users", CREATED_DES: "Newest users", ACCESSED_ASC: "Least recently accessed", ACCESSED_DES: "Most recently accessed" } ## Instruction: Hide yet-to-be-activated usernames from listings ## Code After: ( USERNAME_ASC, USERNAME_DES, FULLNAME_ASC, FULLNAME_DES, CREATED_ASC, CREATED_DES, ACCESSED_ASC, ACCESSED_DES ) = range(0, 8) blurbindex = 5 blurbtrunclength = 300 sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb FROM users WHERE active=1 ORDER BY %s LIMIT %d OFFSET %d""" ordersql = { USERNAME_ASC: "username ASC", USERNAME_DES: "username DESC", FULLNAME_ASC: "fullname ASC", FULLNAME_DES: "fullname DESC", CREATED_ASC: "timeCreated ASC", CREATED_DES: "timeCreated DESC", ACCESSED_ASC: "timeAccessed ASC", ACCESSED_DES: "timeAccessed DESC" } orderhtml = { USERNAME_ASC: "Username in ascending order", USERNAME_DES: "Username in descending order", FULLNAME_ASC: "Full name in ascending order", FULLNAME_DES: "Full name in descending order", CREATED_ASC: "Oldest users", CREATED_DES: "Newest users", ACCESSED_ASC: "Least recently accessed", ACCESSED_DES: "Most recently accessed" }
# ... existing code ... blurbindex = 5 blurbtrunclength = 300 sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb FROM users WHERE active=1 ORDER BY %s LIMIT %d OFFSET %d""" # ... rest of the code ...
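
To make the string-formatting contract in the userlisting record above concrete, here is a small sketch of how sqlbase and ordersql appear to be combined into a final query. It is pure string work with no database connection; the two constants are copied from the record in trimmed form, and the page size of 25 is an arbitrary example value.

USERNAME_ASC, CREATED_DES = 0, 5   # two of the eight sort constants from the record

ordersql = {
    USERNAME_ASC: "username ASC",
    CREATED_DES: "timeCreated DESC",
}

sqlbase = """SELECT userid, username, fullname, timeCreated, timeAccessed, blurb
FROM users
WHERE active=1
ORDER BY %s LIMIT %d OFFSET %d"""

# Second page of the newest (active) users, 25 per page:
query = sqlbase % (ordersql[CREATED_DES], 25, 25)
print(query)
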
8fb958821cd58016c56b5eee2c6531827e4c57b8
modules/juliet_module.py
modules/juliet_module.py
class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)");
from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
Change module class to use Rect for size and take a surface as an argument to draw()
Change module class to use Rect for size and take a surface as an argument to draw()
Python
bsd-2-clause
halfbro/juliet
python
## Code Before: class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)"); ## Instruction: Change module class to use Rect for size and take a surface as an argument to draw() ## Code After: from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
// ... existing code ... from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module() // ... rest of the code ...
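
For the juliet_module record above, a hypothetical subclass shows how the new draw(surf)/update() contract could be used. The class name, colour and size below are invented, and the import path is a guess based on the record's file location; only the base-class interface itself comes from the record.

import pygame
from pygame import Rect

from juliet_module import module  # base class from the record; exact import path is an assumption

class box_module(module):
    mod_name = "box_module"
    mod_size = Rect(0, 0, 120, 40)   # placeholder size

    def draw(self, surf):
        # Blit this module's data onto the surface handed in by the caller.
        pygame.draw.rect(surf, (40, 120, 200), self.mod_size)

    def update(self):
        # Nothing time-dependent to refresh in this toy module.
        pass
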
9d7cec35a1771f45d0083a80e2f1823182d8d0b8
MarkovChainBibleBot/get_bible.py
MarkovChainBibleBot/get_bible.py
import requests from os import path project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt' bible_filename = 'bible.txt' bible_path = path.join('..', 'data', bible_filename) def bible_text(url=project_gutenberg_bible_url): """Get the bible text""" return requests.get(url).text def process_gutenberg_bible(url=project_gutenberg_bible_url): """Remove header and footer info""" gutenberg_header_footer_sep = '\n\r'*8 header, body, footer = bible_text(url).split(gutenberg_header_footer_sep) return body def save_internet_bible(url=project_gutenberg_bible_url): """Save bible as a text file""" bible = process_gutenberg_bible(url) with open(bible_path, 'w') as file: file.write(bible)
import requests from os import path, linesep project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt' bible_filename = 'bible.txt' bible_path = path.join('..', 'data', bible_filename) def bible_text(url=project_gutenberg_bible_url): """Get the bible text""" return requests.get(url).text def process_gutenberg_bible(url=project_gutenberg_bible_url): """Remove header and footer info""" gutenberg_header_footer_sep = linesep*8 header, body, footer = bible_text(url).split(gutenberg_header_footer_sep) return body def save_internet_bible(url=project_gutenberg_bible_url): """Save bible as a text file""" bible = process_gutenberg_bible(url) with open(bible_path, 'w') as file: file.write(bible)
Use os independent line separator
Use os independent line separator

Python
mit
salvor7/MarkovChainBibleBot
python
## Code Before: import requests from os import path project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt' bible_filename = 'bible.txt' bible_path = path.join('..', 'data', bible_filename) def bible_text(url=project_gutenberg_bible_url): """Get the bible text""" return requests.get(url).text def process_gutenberg_bible(url=project_gutenberg_bible_url): """Remove header and footer info""" gutenberg_header_footer_sep = '\n\r'*8 header, body, footer = bible_text(url).split(gutenberg_header_footer_sep) return body def save_internet_bible(url=project_gutenberg_bible_url): """Save bible as a text file""" bible = process_gutenberg_bible(url) with open(bible_path, 'w') as file: file.write(bible) ## Instruction: Use os independent line seperator ## Code After: import requests from os import path, linesep project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt' bible_filename = 'bible.txt' bible_path = path.join('..', 'data', bible_filename) def bible_text(url=project_gutenberg_bible_url): """Get the bible text""" return requests.get(url).text def process_gutenberg_bible(url=project_gutenberg_bible_url): """Remove header and footer info""" gutenberg_header_footer_sep = linesep*8 header, body, footer = bible_text(url).split(gutenberg_header_footer_sep) return body def save_internet_bible(url=project_gutenberg_bible_url): """Save bible as a text file""" bible = process_gutenberg_bible(url) with open(bible_path, 'w') as file: file.write(bible)
# ... existing code ... import requests from os import path, linesep project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt' # ... modified code ... def process_gutenberg_bible(url=project_gutenberg_bible_url): """Remove header and footer info""" gutenberg_header_footer_sep = linesep*8 header, body, footer = bible_text(url).split(gutenberg_header_footer_sep) return body # ... rest of the code ...
09d85cf39fd8196b26b357ee3f0b9fbb67770014
flask_jq.py
flask_jq.py
from flask import Flask, jsonify, render_template, request app = Flask(__name__) @app.route('/_add_numbers') def add_numbers(): ''' Because numbers must be added server side ''' a = request.args.get('a', 0, type=int) b = request.args.get('b', 0, type=int) return jsonify(result=a + b) @app.route('/') def index(): return render_template('index.html')
from flask import Flask, jsonify, render_template, request app = Flask(__name__) @app.route('/_add_numbers') def add_numbers(): ''' Because numbers must be added server side ''' a = request.args.get('a', 0, type=int) b = request.args.get('b', 0, type=int) return jsonify(result=a + b) @app.route('/') def index(): return render_template('index.html') if __name__ == '__main__': app.run('0.0.0.0',port=4000)
Add app run on main
Add app run on main
Python
mit
avidas/flask-jquery,avidas/flask-jquery,avidas/flask-jquery
python
## Code Before: from flask import Flask, jsonify, render_template, request app = Flask(__name__) @app.route('/_add_numbers') def add_numbers(): ''' Because numbers must be added server side ''' a = request.args.get('a', 0, type=int) b = request.args.get('b', 0, type=int) return jsonify(result=a + b) @app.route('/') def index(): return render_template('index.html') ## Instruction: Add app run on main ## Code After: from flask import Flask, jsonify, render_template, request app = Flask(__name__) @app.route('/_add_numbers') def add_numbers(): ''' Because numbers must be added server side ''' a = request.args.get('a', 0, type=int) b = request.args.get('b', 0, type=int) return jsonify(result=a + b) @app.route('/') def index(): return render_template('index.html') if __name__ == '__main__': app.run('0.0.0.0',port=4000)
// ... existing code ... @app.route('/') def index(): return render_template('index.html') if __name__ == '__main__': app.run('0.0.0.0',port=4000) // ... rest of the code ...
a16b51bb26761f8c4a30c06da4c711dac24ac3e0
mr/preprocessing.py
mr/preprocessing.py
import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian def bandpass(image, lshort, llong, threshold=1): """Convolve with a Gaussian to remove short-wavelength noise, and subtract out long-wavelength variations, retaining features of intermediate scale.""" if not 2*lshort < llong: raise ValueError("The smoothing length scale must be more" + "than twice the noise length scale.") settings = dict(mode='nearest', cval=0) boxcar = uniform_filter(image, 2*llong+1, **settings) gaussian = np.fft.ifftn(fourier_gaussian(np.fft.fftn(image), lshort)) result = gaussian - boxcar result -= threshold # Features must be this level above the background. return result.real.clip(min=0.) def scale_to_gamut(image, original_dtype): max_value = np.iinfo(original_dtype).max scaled = (max_value/image.max()*image.clip(min=0.)) return scaled.astype(original_dtype)
import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian import warnings first_run = True try: import pyfftw except ImportError: fftn = np.fft.fftn ifftn = np.fft.ifftn else: def _maybe_align(a): global planned if first_run: warnings.warn("FFTW is configuring itself. This will take " + "several sections, but subsequent calls will run " + "*much* faster.", UserWarning) planned = False return pyfftw.n_byte_align(a, a.dtype.alignment) fftn = lambda a: pyfftw.interfaces.numpy_fft.fftn(_maybe_align(a)) ifftn = lambda a: pyfftw.interfaces.numpy_fft.ifftn(_maybe_align(a)) def bandpass(image, lshort, llong, threshold=1): """Convolve with a Gaussian to remove short-wavelength noise, and subtract out long-wavelength variations, retaining features of intermediate scale.""" if not 2*lshort < llong: raise ValueError("The smoothing length scale must be more" + "than twice the noise length scale.") settings = dict(mode='nearest', cval=0) boxcar = uniform_filter(image, 2*llong+1, **settings) gaussian = ifftn(fourier_gaussian(fftn(image), lshort)) result = gaussian - boxcar result -= threshold # Features must be this level above the background. return result.real.clip(min=0.) def scale_to_gamut(image, original_dtype): max_value = np.iinfo(original_dtype).max scaled = (max_value/image.max()*image.clip(min=0.)) return scaled.astype(original_dtype)
Add optional dependence on FFTW for faster bandpass
ENH: Add optional dependence on FFTW for faster bandpass
Python
bsd-3-clause
daniorerio/trackpy,daniorerio/trackpy
python
## Code Before: import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian def bandpass(image, lshort, llong, threshold=1): """Convolve with a Gaussian to remove short-wavelength noise, and subtract out long-wavelength variations, retaining features of intermediate scale.""" if not 2*lshort < llong: raise ValueError("The smoothing length scale must be more" + "than twice the noise length scale.") settings = dict(mode='nearest', cval=0) boxcar = uniform_filter(image, 2*llong+1, **settings) gaussian = np.fft.ifftn(fourier_gaussian(np.fft.fftn(image), lshort)) result = gaussian - boxcar result -= threshold # Features must be this level above the background. return result.real.clip(min=0.) def scale_to_gamut(image, original_dtype): max_value = np.iinfo(original_dtype).max scaled = (max_value/image.max()*image.clip(min=0.)) return scaled.astype(original_dtype) ## Instruction: ENH: Add optional dependence on FFTW for faster bandpass ## Code After: import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian import warnings first_run = True try: import pyfftw except ImportError: fftn = np.fft.fftn ifftn = np.fft.ifftn else: def _maybe_align(a): global planned if first_run: warnings.warn("FFTW is configuring itself. This will take " + "several sections, but subsequent calls will run " + "*much* faster.", UserWarning) planned = False return pyfftw.n_byte_align(a, a.dtype.alignment) fftn = lambda a: pyfftw.interfaces.numpy_fft.fftn(_maybe_align(a)) ifftn = lambda a: pyfftw.interfaces.numpy_fft.ifftn(_maybe_align(a)) def bandpass(image, lshort, llong, threshold=1): """Convolve with a Gaussian to remove short-wavelength noise, and subtract out long-wavelength variations, retaining features of intermediate scale.""" if not 2*lshort < llong: raise ValueError("The smoothing length scale must be more" + "than twice the noise length scale.") settings = dict(mode='nearest', cval=0) boxcar = uniform_filter(image, 2*llong+1, **settings) gaussian = ifftn(fourier_gaussian(fftn(image), lshort)) result = gaussian - boxcar result -= threshold # Features must be this level above the background. return result.real.clip(min=0.) def scale_to_gamut(image, original_dtype): max_value = np.iinfo(original_dtype).max scaled = (max_value/image.max()*image.clip(min=0.)) return scaled.astype(original_dtype)
# ... existing code ... import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian import warnings first_run = True try: import pyfftw except ImportError: fftn = np.fft.fftn ifftn = np.fft.ifftn else: def _maybe_align(a): global planned if first_run: warnings.warn("FFTW is configuring itself. This will take " + "several sections, but subsequent calls will run " + "*much* faster.", UserWarning) planned = False return pyfftw.n_byte_align(a, a.dtype.alignment) fftn = lambda a: pyfftw.interfaces.numpy_fft.fftn(_maybe_align(a)) ifftn = lambda a: pyfftw.interfaces.numpy_fft.ifftn(_maybe_align(a)) def bandpass(image, lshort, llong, threshold=1): # ... modified code ... "than twice the noise length scale.") settings = dict(mode='nearest', cval=0) boxcar = uniform_filter(image, 2*llong+1, **settings) gaussian = ifftn(fourier_gaussian(fftn(image), lshort)) result = gaussian - boxcar result -= threshold # Features must be this level above the background. return result.real.clip(min=0.) # ... rest of the code ...
a48aa6d409885cc95cc806172e720122d30ca648
client/src/main/java/com/github/arteam/simplejsonrpc/client/generator/SecureRandomIdGenerator.java
client/src/main/java/com/github/arteam/simplejsonrpc/client/generator/SecureRandomIdGenerator.java
package com.github.arteam.simplejsonrpc.client.generator; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; /** * Date: 1/12/15 * Time: 11:17 PM * Abstract generator of secure random identifiers */ abstract class SecureRandomIdGenerator<T> implements IdGenerator<T> { private static final String SHA_1_PRNG = "SHA1PRNG"; protected final SecureRandom secureRandom; protected SecureRandomIdGenerator() { secureRandom = initSecureRandom(); } private static SecureRandom initSecureRandom() { try { SecureRandom secureRandom = SecureRandom.getInstance(SHA_1_PRNG); secureRandom.nextBytes(new byte[]{}); // Important to seed immediately after creation return secureRandom; } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } } }
package com.github.arteam.simplejsonrpc.client.generator; import java.security.SecureRandom; /** * Date: 1/12/15 * Time: 11:17 PM * Abstract generator of secure random identifiers */ abstract class SecureRandomIdGenerator<T> implements IdGenerator<T> { protected final SecureRandom secureRandom; protected SecureRandomIdGenerator() { this(new SecureRandom()); } protected SecureRandomIdGenerator(SecureRandom secureRandom) { this.secureRandom = secureRandom; } }
Use Native secure random generator by default
Use Native secure random generator by default SHA1PRNG is a relic of the old Java 6 times.
Java
mit
arteam/simple-json-rpc
java
## Code Before: package com.github.arteam.simplejsonrpc.client.generator; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; /** * Date: 1/12/15 * Time: 11:17 PM * Abstract generator of secure random identifiers */ abstract class SecureRandomIdGenerator<T> implements IdGenerator<T> { private static final String SHA_1_PRNG = "SHA1PRNG"; protected final SecureRandom secureRandom; protected SecureRandomIdGenerator() { secureRandom = initSecureRandom(); } private static SecureRandom initSecureRandom() { try { SecureRandom secureRandom = SecureRandom.getInstance(SHA_1_PRNG); secureRandom.nextBytes(new byte[]{}); // Important to seed immediately after creation return secureRandom; } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } } } ## Instruction: Use Native secure random generator by default SHA1PRNG is a relic of the old Java 6 times. ## Code After: package com.github.arteam.simplejsonrpc.client.generator; import java.security.SecureRandom; /** * Date: 1/12/15 * Time: 11:17 PM * Abstract generator of secure random identifiers */ abstract class SecureRandomIdGenerator<T> implements IdGenerator<T> { protected final SecureRandom secureRandom; protected SecureRandomIdGenerator() { this(new SecureRandom()); } protected SecureRandomIdGenerator(SecureRandom secureRandom) { this.secureRandom = secureRandom; } }
... package com.github.arteam.simplejsonrpc.client.generator; import java.security.SecureRandom; /** ... */ abstract class SecureRandomIdGenerator<T> implements IdGenerator<T> { protected final SecureRandom secureRandom; protected SecureRandomIdGenerator() { this(new SecureRandom()); } protected SecureRandomIdGenerator(SecureRandom secureRandom) { this.secureRandom = secureRandom; } } ...
ac0267d318939e4e7a62342b5dc6a09c3264ea74
flocker/node/_deploy.py
flocker/node/_deploy.py
class Deployment(object): """ """ _gear_client = None def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
from .gear import GearClient class Deployment(object): """ """ def __init__(self, gear_client=None): """ :param IGearClient gear_client: The gear client API to use in deployment operations. Default ``GearClient``. """ if gear_client is None: gear_client = GearClient(hostname=b'127.0.0.1') self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
Allow a fake gear client to be supplied
Allow a fake gear client to be supplied
Python
apache-2.0
wallnerryan/flocker-profiles,hackday-profilers/flocker,lukemarsden/flocker,Azulinho/flocker,w4ngyi/flocker,hackday-profilers/flocker,LaynePeng/flocker,lukemarsden/flocker,AndyHuu/flocker,beni55/flocker,mbrukman/flocker,1d4Nf6/flocker,w4ngyi/flocker,beni55/flocker,adamtheturtle/flocker,hackday-profilers/flocker,achanda/flocker,moypray/flocker,achanda/flocker,runcom/flocker,LaynePeng/flocker,achanda/flocker,agonzalezro/flocker,1d4Nf6/flocker,LaynePeng/flocker,jml/flocker,beni55/flocker,jml/flocker,Azulinho/flocker,runcom/flocker,adamtheturtle/flocker,moypray/flocker,lukemarsden/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,agonzalezro/flocker,AndyHuu/flocker,w4ngyi/flocker,moypray/flocker,Azulinho/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,mbrukman/flocker,jml/flocker,runcom/flocker,AndyHuu/flocker
python
## Code Before: class Deployment(object): """ """ _gear_client = None def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """ ## Instruction: Allow a fake gear client to be supplied ## Code After: from .gear import GearClient class Deployment(object): """ """ def __init__(self, gear_client=None): """ :param IGearClient gear_client: The gear client API to use in deployment operations. Default ``GearClient``. """ if gear_client is None: gear_client = GearClient(hostname=b'127.0.0.1') self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
... from .gear import GearClient class Deployment(object): """ """ def __init__(self, gear_client=None): """ :param IGearClient gear_client: The gear client API to use in deployment operations. Default ``GearClient``. """ if gear_client is None: gear_client = GearClient(hostname=b'127.0.0.1') self._gear_client = gear_client def start_container(self, application): """ ...
0167e246b74789cc0181b603520ec7f58ef7b5fe
pandas/core/api.py
pandas/core/api.py
import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools
import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
Add new core.config API functions to the pandas top level module
ENH: Add new core.config API functions to the pandas top level module
Python
bsd-3-clause
pandas-dev/pandas,GuessWhoSamFoo/pandas,TomAugspurger/pandas,toobaz/pandas,MJuddBooth/pandas,cython-testbed/pandas,TomAugspurger/pandas,nmartensen/pandas,cython-testbed/pandas,DGrady/pandas,DGrady/pandas,datapythonista/pandas,kdebrab/pandas,dsm054/pandas,Winand/pandas,linebp/pandas,dsm054/pandas,toobaz/pandas,jmmease/pandas,zfrenchee/pandas,jorisvandenbossche/pandas,cbertinato/pandas,linebp/pandas,harisbal/pandas,rs2/pandas,linebp/pandas,nmartensen/pandas,jmmease/pandas,jreback/pandas,linebp/pandas,cbertinato/pandas,zfrenchee/pandas,nmartensen/pandas,MJuddBooth/pandas,cython-testbed/pandas,amolkahat/pandas,jmmease/pandas,cython-testbed/pandas,GuessWhoSamFoo/pandas,harisbal/pandas,zfrenchee/pandas,jmmease/pandas,jorisvandenbossche/pandas,GuessWhoSamFoo/pandas,gfyoung/pandas,amolkahat/pandas,pandas-dev/pandas,jreback/pandas,kdebrab/pandas,MJuddBooth/pandas,datapythonista/pandas,pratapvardhan/pandas,amolkahat/pandas,Winand/pandas,cbertinato/pandas,jreback/pandas,gfyoung/pandas,pandas-dev/pandas,jreback/pandas,louispotok/pandas,linebp/pandas,toobaz/pandas,gfyoung/pandas,Winand/pandas,jorisvandenbossche/pandas,rs2/pandas,DGrady/pandas,dsm054/pandas,winklerand/pandas,kdebrab/pandas,winklerand/pandas,TomAugspurger/pandas,datapythonista/pandas,winklerand/pandas,kdebrab/pandas,zfrenchee/pandas,pratapvardhan/pandas,Winand/pandas,TomAugspurger/pandas,datapythonista/pandas,toobaz/pandas,DGrady/pandas,cbertinato/pandas,rs2/pandas,rs2/pandas,DGrady/pandas,toobaz/pandas,gfyoung/pandas,harisbal/pandas,jorisvandenbossche/pandas,nmartensen/pandas,louispotok/pandas,harisbal/pandas,amolkahat/pandas,linebp/pandas,cbertinato/pandas,Winand/pandas,louispotok/pandas,Winand/pandas,pratapvardhan/pandas,nmartensen/pandas,winklerand/pandas,DGrady/pandas,gfyoung/pandas,cython-testbed/pandas,pratapvardhan/pandas,louispotok/pandas,zfrenchee/pandas,MJuddBooth/pandas,GuessWhoSamFoo/pandas,pratapvardhan/pandas,winklerand/pandas,amolkahat/pandas,kdebrab/pandas,pandas-dev/pandas,harisbal/pandas,jreback/pandas,dsm054/pandas,GuessWhoSamFoo/pandas,MJuddBooth/pandas,jmmease/pandas,winklerand/pandas,dsm054/pandas,louispotok/pandas,jmmease/pandas,nmartensen/pandas
python
## Code Before: import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools ## Instruction: ENH: Add new core.config API functions to the pandas top level module ## Code After: import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
// ... existing code ... # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options // ... rest of the code ...
562bd95583a33d69e31b1e9bdea8f2076d8df46a
src/swganh/tre/visitors/objects/object_visitor-intl.h
src/swganh/tre/visitors/objects/object_visitor-intl.h
// This file is part of SWGANH which is released under the MIT license. // See file LICENSE or go to http://swganh.com/LICENSE #pragma once namespace swganh { namespace tre { template <class T> T ObjectVisitor::attribute(const std::string& key) { std::map<std::string, std::shared_ptr<boost::any>>::const_iterator it = attributes_.find(key); if(it != attributes_.cend()) { return boost::any_cast<T>(*it->second); } throw std::runtime_error("Invalid type requested for attribute"); } } }
// This file is part of SWGANH which is released under the MIT license. // See file LICENSE or go to http://swganh.com/LICENSE #pragma once #include<stdexcept> namespace swganh { namespace tre { template <class T> T ObjectVisitor::attribute(const std::string& key) { std::map<std::string, std::shared_ptr<boost::any>>::const_iterator it = attributes_.find(key); if(it != attributes_.cend()) { return boost::any_cast<T>(*it->second); } throw std::runtime_error("Invalid type requested for attribute"); } } }
Fix for missing include that vs studio forgot to complain about
Fix for missing include that vs studio forgot to complain about
C
mit
anhstudios/swganh,anhstudios/swganh,anhstudios/swganh
c
## Code Before: // This file is part of SWGANH which is released under the MIT license. // See file LICENSE or go to http://swganh.com/LICENSE #pragma once namespace swganh { namespace tre { template <class T> T ObjectVisitor::attribute(const std::string& key) { std::map<std::string, std::shared_ptr<boost::any>>::const_iterator it = attributes_.find(key); if(it != attributes_.cend()) { return boost::any_cast<T>(*it->second); } throw std::runtime_error("Invalid type requested for attribute"); } } } ## Instruction: Fix for missing include that vs studio forgot to complain about ## Code After: // This file is part of SWGANH which is released under the MIT license. // See file LICENSE or go to http://swganh.com/LICENSE #pragma once #include<stdexcept> namespace swganh { namespace tre { template <class T> T ObjectVisitor::attribute(const std::string& key) { std::map<std::string, std::shared_ptr<boost::any>>::const_iterator it = attributes_.find(key); if(it != attributes_.cend()) { return boost::any_cast<T>(*it->second); } throw std::runtime_error("Invalid type requested for attribute"); } } }
# ... existing code ... // This file is part of SWGANH which is released under the MIT license. // See file LICENSE or go to http://swganh.com/LICENSE #pragma once #include<stdexcept> namespace swganh { namespace tre # ... rest of the code ...
e601172065ca3959c1399608c294243fa2b83cef
tests/test_SwitchController.py
tests/test_SwitchController.py
import unittest from mpf.system.machine import MachineController from tests.MpfTestCase import MpfTestCase from mock import MagicMock import time class TestSwitchController(MpfTestCase): def getConfigFile(self): return 'config.yaml' def getMachinePath(self): return '../tests/machine_files/switch_controller/' def _callback(self): self.isActive = self.machine.switch_controller.is_active("s_test", ms=300) def testIsActiveTimeing(self): self.isActive = None self.machine.switch_controller.add_switch_handler( switch_name="s_test", callback=self._callback, state=1, ms=300) self.machine.switch_controller.process_switch("s_test", 1, True) self.advance_time_and_run(3) self.assertEqual(True, self.isActive)
from tests.MpfTestCase import MpfTestCase class TestSwitchController(MpfTestCase): def getConfigFile(self): return 'config.yaml' def getMachinePath(self): return '../tests/machine_files/switch_controller/' def _callback(self): self.isActive = self.machine.switch_controller.is_active("s_test", ms=300) def test_is_active_timing(self): self.isActive = None self.machine.switch_controller.add_switch_handler( switch_name="s_test", callback=self._callback, state=1, ms=300) self.machine.switch_controller.process_switch("s_test", 1, True) self.advance_time_and_run(3) self.assertEqual(True, self.isActive) def test_initial_state(self): # tests that when MPF starts, the initial states of switches that # started in that state are read correctly. self.assertFalse(self.machine.switch_controller.is_active('s_test', 1000))
Add test for initial switch states
Add test for initial switch states
Python
mit
missionpinball/mpf,missionpinball/mpf
python
## Code Before: import unittest from mpf.system.machine import MachineController from tests.MpfTestCase import MpfTestCase from mock import MagicMock import time class TestSwitchController(MpfTestCase): def getConfigFile(self): return 'config.yaml' def getMachinePath(self): return '../tests/machine_files/switch_controller/' def _callback(self): self.isActive = self.machine.switch_controller.is_active("s_test", ms=300) def testIsActiveTimeing(self): self.isActive = None self.machine.switch_controller.add_switch_handler( switch_name="s_test", callback=self._callback, state=1, ms=300) self.machine.switch_controller.process_switch("s_test", 1, True) self.advance_time_and_run(3) self.assertEqual(True, self.isActive) ## Instruction: Add test for initial switch states ## Code After: from tests.MpfTestCase import MpfTestCase class TestSwitchController(MpfTestCase): def getConfigFile(self): return 'config.yaml' def getMachinePath(self): return '../tests/machine_files/switch_controller/' def _callback(self): self.isActive = self.machine.switch_controller.is_active("s_test", ms=300) def test_is_active_timing(self): self.isActive = None self.machine.switch_controller.add_switch_handler( switch_name="s_test", callback=self._callback, state=1, ms=300) self.machine.switch_controller.process_switch("s_test", 1, True) self.advance_time_and_run(3) self.assertEqual(True, self.isActive) def test_initial_state(self): # tests that when MPF starts, the initial states of switches that # started in that state are read correctly. self.assertFalse(self.machine.switch_controller.is_active('s_test', 1000))
# ... existing code ... from tests.MpfTestCase import MpfTestCase class TestSwitchController(MpfTestCase): # ... modified code ... def getMachinePath(self): return '../tests/machine_files/switch_controller/' def _callback(self): self.isActive = self.machine.switch_controller.is_active("s_test", ms=300) def test_is_active_timing(self): self.isActive = None self.machine.switch_controller.add_switch_handler( ... self.advance_time_and_run(3) self.assertEqual(True, self.isActive) def test_initial_state(self): # tests that when MPF starts, the initial states of switches that # started in that state are read correctly. self.assertFalse(self.machine.switch_controller.is_active('s_test', 1000)) # ... rest of the code ...
142b9fa072e5958273e67ff38f8c7c7f6ea51373
laboratory/exceptions.py
laboratory/exceptions.py
class LaboratoryException(Exception): pass class MismatchException(LaboratoryException): pass
class LaboratoryException(Exception): def __init__(self, message, *a, **kw): self.message = message super(LaboratoryException, self).__init__(*a, **kw) class MismatchException(LaboratoryException): pass
Add message attr to LaboratoryException
Add message attr to LaboratoryException
Python
mit
joealcorn/laboratory
python
## Code Before: class LaboratoryException(Exception): pass class MismatchException(LaboratoryException): pass ## Instruction: Add message attr to LaboratoryException ## Code After: class LaboratoryException(Exception): def __init__(self, message, *a, **kw): self.message = message super(LaboratoryException, self).__init__(*a, **kw) class MismatchException(LaboratoryException): pass
# ... existing code ... class LaboratoryException(Exception): def __init__(self, message, *a, **kw): self.message = message super(LaboratoryException, self).__init__(*a, **kw) class MismatchException(LaboratoryException): # ... rest of the code ...
46dfcdbc53671c78f37a74fa23edf2471bd9405a
src/main/java/com/homeadvisor/kafdrop/config/SchemaRegistryConfiguration.java
src/main/java/com/homeadvisor/kafdrop/config/SchemaRegistryConfiguration.java
package com.homeadvisor.kafdrop.config; import org.hibernate.validator.constraints.NotBlank; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; @Configuration public class SchemaRegistryConfiguration { @Component @ConfigurationProperties(prefix = "schemaregistry") public static class SchemaRegistryProperties { public static final Pattern CONNECT_SEPARATOR = Pattern.compile("\\s*,\\s*"); @NotBlank private String connect; public String getConnect() { return connect; } public void setConnect(String connect) { this.connect = connect; } public List<String> getConnectList() { return CONNECT_SEPARATOR.splitAsStream(this.connect) .map(String::trim) .filter(s -> s.length() > 0) .collect(Collectors.toList()); } } }
package com.homeadvisor.kafdrop.config; import org.hibernate.validator.constraints.NotBlank; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; @Configuration public class SchemaRegistryConfiguration { @Component @ConfigurationProperties(prefix = "schemaregistry") public static class SchemaRegistryProperties { @NotBlank private String connect; public String getConnect() { return connect; } public void setConnect(String connect) { this.connect = connect; } } }
Remove list-parsing code from schema registry config parser
Remove list-parsing code from schema registry config parser
Java
apache-2.0
HomeAdvisor/Kafdrop,HomeAdvisor/Kafdrop,HomeAdvisor/Kafdrop
java
## Code Before: package com.homeadvisor.kafdrop.config; import org.hibernate.validator.constraints.NotBlank; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; @Configuration public class SchemaRegistryConfiguration { @Component @ConfigurationProperties(prefix = "schemaregistry") public static class SchemaRegistryProperties { public static final Pattern CONNECT_SEPARATOR = Pattern.compile("\\s*,\\s*"); @NotBlank private String connect; public String getConnect() { return connect; } public void setConnect(String connect) { this.connect = connect; } public List<String> getConnectList() { return CONNECT_SEPARATOR.splitAsStream(this.connect) .map(String::trim) .filter(s -> s.length() > 0) .collect(Collectors.toList()); } } } ## Instruction: Remove list-parsing code from schema registry config parser ## Code After: package com.homeadvisor.kafdrop.config; import org.hibernate.validator.constraints.NotBlank; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; @Configuration public class SchemaRegistryConfiguration { @Component @ConfigurationProperties(prefix = "schemaregistry") public static class SchemaRegistryProperties { @NotBlank private String connect; public String getConnect() { return connect; } public void setConnect(String connect) { this.connect = connect; } } }
# ... existing code ... import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; @Configuration public class SchemaRegistryConfiguration { # ... modified code ... @ConfigurationProperties(prefix = "schemaregistry") public static class SchemaRegistryProperties { @NotBlank private String connect; ... this.connect = connect; } } } # ... rest of the code ...
9c2951d794bb27952606cae77da1ebcd0d651e72
aiodownload/api.py
aiodownload/api.py
from aiodownload import AioDownloadBundle, AioDownload import asyncio def one(url, download=None): return [s for s in swarm([url], download=download)][0] def swarm(urls, download=None): return [e for e in each(urls, download=download)] def each(iterable, url_map=None, download=None): url_map = url_map or _url_map download = download or AioDownload() tasks = [] for i in iterable: url = url_map(i) info = None if i == url else i tasks.append( download._loop.create_task( AioDownload(url, info=info) ) ) for task_set in download._loop.run_until_complete(asyncio.wait(tasks)): for task in task_set: yield task.result() def _url_map(x): return str(x)
from aiodownload import AioDownloadBundle, AioDownload import asyncio def one(url, download=None): return [s for s in swarm([url], download=download)][0] def swarm(urls, download=None): return [e for e in each(urls, download=download)] def each(iterable, url_map=None, download=None): url_map = url_map or _url_map download = download or AioDownload() tasks = [] for i in iterable: url = url_map(i) info = None if i == url else i tasks.append( download._loop.create_task( download.main(AioDownloadBundle(url, info=info)) ) ) for task_set in download._loop.run_until_complete(asyncio.wait(tasks)): for task in task_set: yield task.result() def _url_map(x): return str(x)
Fix - needed to provide create_task a function, not a class
Fix - needed to provide create_task a function, not a class
Python
mit
jelloslinger/aiodownload
python
## Code Before: from aiodownload import AioDownloadBundle, AioDownload import asyncio def one(url, download=None): return [s for s in swarm([url], download=download)][0] def swarm(urls, download=None): return [e for e in each(urls, download=download)] def each(iterable, url_map=None, download=None): url_map = url_map or _url_map download = download or AioDownload() tasks = [] for i in iterable: url = url_map(i) info = None if i == url else i tasks.append( download._loop.create_task( AioDownload(url, info=info) ) ) for task_set in download._loop.run_until_complete(asyncio.wait(tasks)): for task in task_set: yield task.result() def _url_map(x): return str(x) ## Instruction: Fix - needed to provide create_task a function, not a class ## Code After: from aiodownload import AioDownloadBundle, AioDownload import asyncio def one(url, download=None): return [s for s in swarm([url], download=download)][0] def swarm(urls, download=None): return [e for e in each(urls, download=download)] def each(iterable, url_map=None, download=None): url_map = url_map or _url_map download = download or AioDownload() tasks = [] for i in iterable: url = url_map(i) info = None if i == url else i tasks.append( download._loop.create_task( download.main(AioDownloadBundle(url, info=info)) ) ) for task_set in download._loop.run_until_complete(asyncio.wait(tasks)): for task in task_set: yield task.result() def _url_map(x): return str(x)
// ... existing code ... info = None if i == url else i tasks.append( download._loop.create_task( download.main(AioDownloadBundle(url, info=info)) ) ) // ... rest of the code ...
9971e5424b998f45e26b9da8288f20d641885043
massa/__init__.py
massa/__init__.py
from flask import Flask, render_template, g from flask.ext.appconfig import AppConfig def create_app(configfile=None): app = Flask('massa') AppConfig(app, configfile) @app.route('/') def index(): return render_template('index.html') from .container import build sl = build(app.config) from .api import bp app.register_blueprint(bp, url_prefix='/api') @app.before_request def globals(): g.sl = sl return app
from flask import Flask, render_template, g from flask.ext.appconfig import AppConfig from .container import build from .api import bp as api def create_app(configfile=None): app = Flask('massa') AppConfig(app, configfile) @app.route('/') def index(): return render_template('index.html') sl = build(app.config) app.register_blueprint(api, url_prefix='/api') @app.before_request def globals(): g.sl = sl return app
Move import statements to the top.
Move import statements to the top.
Python
mit
jaapverloop/massa
python
## Code Before: from flask import Flask, render_template, g from flask.ext.appconfig import AppConfig def create_app(configfile=None): app = Flask('massa') AppConfig(app, configfile) @app.route('/') def index(): return render_template('index.html') from .container import build sl = build(app.config) from .api import bp app.register_blueprint(bp, url_prefix='/api') @app.before_request def globals(): g.sl = sl return app ## Instruction: Move import statements to the top. ## Code After: from flask import Flask, render_template, g from flask.ext.appconfig import AppConfig from .container import build from .api import bp as api def create_app(configfile=None): app = Flask('massa') AppConfig(app, configfile) @app.route('/') def index(): return render_template('index.html') sl = build(app.config) app.register_blueprint(api, url_prefix='/api') @app.before_request def globals(): g.sl = sl return app
// ... existing code ... from flask import Flask, render_template, g from flask.ext.appconfig import AppConfig from .container import build from .api import bp as api def create_app(configfile=None): // ... modified code ... def index(): return render_template('index.html') sl = build(app.config) app.register_blueprint(api, url_prefix='/api') @app.before_request def globals(): // ... rest of the code ...
cdb66acf92dae34d1b17c2a429738c73fb06caad
scoring/checks/ldap.py
scoring/checks/ldap.py
from __future__ import absolute_import from config import config import ldap # DEFAULTS ldap_config = { 'timeout': 5 } # /DEFAULTS # CONFIG if "ldap" in config: ldap_config.update(config["ldap"]) # /CONFIG def check_ldap_lookup(check, data): check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName())) check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server") check.addOutput("OUTPUT:\n") check.addOutput("Starting check...") try: # Setup LDAP l = ldap.initialize('ldap://%s' % data["HOST"]) # Bind to the user we're using to lookup username = data["USER"] password = data["PASS"] l.protocol_version = ldap.VERSION3 l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"]) l.simple_bind_s(username, password) # We're good! check.setPassed() check.addOutput("Check successful!") except Exception as e: check.addOutput("ERROR: %s: %s" % (type(e).__name__, e)) return
from __future__ import absolute_import from config import config import ldap # DEFAULTS ldap_config = { 'timeout': 5 } # /DEFAULTS # CONFIG if "ldap" in config: ldap_config.update(config["ldap"]) # /CONFIG def check_ldap_lookup(check, data): check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName())) check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server") check.addOutput("OUTPUT:\n") check.addOutput("Starting check...") try: # Setup LDAP l = ldap.initialize('ldap://%s' % data["HOST"]) # Bind to the user we're using to lookup domain = data["DOMAIN"] username = data["USER"] password = data["PASS"] actual_username = "%s\%s" % (domain, username) l.protocol_version = ldap.VERSION3 l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"]) l.simple_bind_s(actual_username, password) # We're good! check.setPassed() check.addOutput("Check successful!") except Exception as e: check.addOutput("ERROR: %s: %s" % (type(e).__name__, e)) return
Make LDAP check even better
Make LDAP check even better
Python
mit
ubnetdef/scoreengine,ubnetdef/scoreengine
python
## Code Before: from __future__ import absolute_import from config import config import ldap # DEFAULTS ldap_config = { 'timeout': 5 } # /DEFAULTS # CONFIG if "ldap" in config: ldap_config.update(config["ldap"]) # /CONFIG def check_ldap_lookup(check, data): check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName())) check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server") check.addOutput("OUTPUT:\n") check.addOutput("Starting check...") try: # Setup LDAP l = ldap.initialize('ldap://%s' % data["HOST"]) # Bind to the user we're using to lookup username = data["USER"] password = data["PASS"] l.protocol_version = ldap.VERSION3 l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"]) l.simple_bind_s(username, password) # We're good! check.setPassed() check.addOutput("Check successful!") except Exception as e: check.addOutput("ERROR: %s: %s" % (type(e).__name__, e)) return ## Instruction: Make LDAP check even better ## Code After: from __future__ import absolute_import from config import config import ldap # DEFAULTS ldap_config = { 'timeout': 5 } # /DEFAULTS # CONFIG if "ldap" in config: ldap_config.update(config["ldap"]) # /CONFIG def check_ldap_lookup(check, data): check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName())) check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server") check.addOutput("OUTPUT:\n") check.addOutput("Starting check...") try: # Setup LDAP l = ldap.initialize('ldap://%s' % data["HOST"]) # Bind to the user we're using to lookup domain = data["DOMAIN"] username = data["USER"] password = data["PASS"] actual_username = "%s\%s" % (domain, username) l.protocol_version = ldap.VERSION3 l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"]) l.simple_bind_s(actual_username, password) # We're good! check.setPassed() check.addOutput("Check successful!") except Exception as e: check.addOutput("ERROR: %s: %s" % (type(e).__name__, e)) return
... l = ldap.initialize('ldap://%s' % data["HOST"]) # Bind to the user we're using to lookup domain = data["DOMAIN"] username = data["USER"] password = data["PASS"] actual_username = "%s\%s" % (domain, username) l.protocol_version = ldap.VERSION3 l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"]) l.simple_bind_s(actual_username, password) # We're good! check.setPassed() ...
6906b2286a023b850550eb8b5e7628a0664b5742
src/main/java/com/elmakers/mine/bukkit/plugins/magic/spells/GrenadeSpell.java
src/main/java/com/elmakers/mine/bukkit/plugins/magic/spells/GrenadeSpell.java
package com.elmakers.mine.bukkit.plugins.magic.spells; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.entity.EntityType; import org.bukkit.entity.TNTPrimed; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.plugins.magic.Spell; import com.elmakers.mine.bukkit.utilities.borrowed.ConfigurationNode; public class GrenadeSpell extends Spell { @Override public boolean onCast(ConfigurationNode parameters) { Block target = getNextBlock(); Location loc = target.getLocation(); TNTPrimed grenade = (TNTPrimed)player.getWorld().spawnEntity(loc, EntityType.PRIMED_TNT); Vector aim = getAimVector(); grenade.setVelocity(aim); grenade.setYield(6); grenade.setFuseTicks(80); return true; } }
package com.elmakers.mine.bukkit.plugins.magic.spells; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.entity.EntityType; import org.bukkit.entity.TNTPrimed; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.plugins.magic.Spell; import com.elmakers.mine.bukkit.utilities.borrowed.ConfigurationNode; public class GrenadeSpell extends Spell { int defaultSize = 6; @Override public boolean onCast(ConfigurationNode parameters) { int size = parameters.getInt("size", defaultSize); int fuse = parameters.getInt("fuse", 80); boolean useFire = parameters.getBoolean("fire", true); Block target = getNextBlock(); Location loc = target.getLocation(); TNTPrimed grenade = (TNTPrimed)player.getWorld().spawnEntity(loc, EntityType.PRIMED_TNT); Vector aim = getAimVector(); grenade.setVelocity(aim); grenade.setYield(size); grenade.setFuseTicks(fuse); grenade.setIsIncendiary(useFire); return true; } }
Add parameters to grenade spell.
Add parameters to grenade spell.
Java
mit
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
java
## Code Before: package com.elmakers.mine.bukkit.plugins.magic.spells; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.entity.EntityType; import org.bukkit.entity.TNTPrimed; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.plugins.magic.Spell; import com.elmakers.mine.bukkit.utilities.borrowed.ConfigurationNode; public class GrenadeSpell extends Spell { @Override public boolean onCast(ConfigurationNode parameters) { Block target = getNextBlock(); Location loc = target.getLocation(); TNTPrimed grenade = (TNTPrimed)player.getWorld().spawnEntity(loc, EntityType.PRIMED_TNT); Vector aim = getAimVector(); grenade.setVelocity(aim); grenade.setYield(6); grenade.setFuseTicks(80); return true; } } ## Instruction: Add parameters to grenade spell. ## Code After: package com.elmakers.mine.bukkit.plugins.magic.spells; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.entity.EntityType; import org.bukkit.entity.TNTPrimed; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.plugins.magic.Spell; import com.elmakers.mine.bukkit.utilities.borrowed.ConfigurationNode; public class GrenadeSpell extends Spell { int defaultSize = 6; @Override public boolean onCast(ConfigurationNode parameters) { int size = parameters.getInt("size", defaultSize); int fuse = parameters.getInt("fuse", 80); boolean useFire = parameters.getBoolean("fire", true); Block target = getNextBlock(); Location loc = target.getLocation(); TNTPrimed grenade = (TNTPrimed)player.getWorld().spawnEntity(loc, EntityType.PRIMED_TNT); Vector aim = getAimVector(); grenade.setVelocity(aim); grenade.setYield(size); grenade.setFuseTicks(fuse); grenade.setIsIncendiary(useFire); return true; } }
... public class GrenadeSpell extends Spell { int defaultSize = 6; @Override public boolean onCast(ConfigurationNode parameters) { int size = parameters.getInt("size", defaultSize); int fuse = parameters.getInt("fuse", 80); boolean useFire = parameters.getBoolean("fire", true); Block target = getNextBlock(); Location loc = target.getLocation(); TNTPrimed grenade = (TNTPrimed)player.getWorld().spawnEntity(loc, EntityType.PRIMED_TNT); ... Vector aim = getAimVector(); grenade.setVelocity(aim); grenade.setYield(size); grenade.setFuseTicks(fuse); grenade.setIsIncendiary(useFire); return true; } ...
78e24093f314821d7818f31574dbe521c0ae5fef
sharepa/__init__.py
sharepa/__init__.py
from sharepa.search import ShareSearch, basic_search # noqa from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
from sharepa.search import ShareSearch, basic_search # noqa from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa def source_counts(): return bucket_to_dataframe( 'total_source_counts', basic_search.execute().aggregations.sourceAgg.buckets )
Make it so that source_counts is only executed on purpose
Make it so that source_counts is only executed on purpose
Python
mit
erinspace/sharepa,CenterForOpenScience/sharepa,fabianvf/sharepa,samanehsan/sharepa
python
## Code Before: from sharepa.search import ShareSearch, basic_search # noqa from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa ## Instruction: Make it so that source_counts is only executed on purpose ## Code After: from sharepa.search import ShareSearch, basic_search # noqa from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa def source_counts(): return bucket_to_dataframe( 'total_source_counts', basic_search.execute().aggregations.sourceAgg.buckets )
// ... existing code ... from sharepa.search import ShareSearch, basic_search # noqa from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa def source_counts(): return bucket_to_dataframe( 'total_source_counts', basic_search.execute().aggregations.sourceAgg.buckets ) // ... rest of the code ...
084f70c87cf4a22d797ac282ba7f074802f6e6b3
setup.py
setup.py
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup(name='rafem', version='0.1.0', author='Katherine Ratliff', author_email='[email protected]', description='River Avulsion Flooplain Evolution Model', long_description=open('README.rst').read(), url='https://github.com/katmratliff/avulsion-bmi', license='MIT', packages=find_packages(), )
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages from model_metadata.utils import get_cmdclass, get_entry_points pymt_components = [ ( "BmiRiverModule=rafem:BmiRiverModule", ".bmi", ) ] setup(name='rafem', version='0.1.0', author='Katherine Ratliff', author_email='[email protected]', description='River Avulsion Flooplain Evolution Model', long_description=open('README.rst').read(), url='https://github.com/katmratliff/avulsion-bmi', license='MIT', packages=find_packages(), cmdclass=get_cmdclass(pymt_components), entry_points=get_entry_points(pymt_components), )
Install rafem as a pymt component.
Install rafem as a pymt component.
Python
mit
katmratliff/avulsion-bmi,mcflugen/avulsion-bmi
python
## Code Before: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup(name='rafem', version='0.1.0', author='Katherine Ratliff', author_email='[email protected]', description='River Avulsion Flooplain Evolution Model', long_description=open('README.rst').read(), url='https://github.com/katmratliff/avulsion-bmi', license='MIT', packages=find_packages(), ) ## Instruction: Install rafem as a pymt component. ## Code After: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages from model_metadata.utils import get_cmdclass, get_entry_points pymt_components = [ ( "BmiRiverModule=rafem:BmiRiverModule", ".bmi", ) ] setup(name='rafem', version='0.1.0', author='Katherine Ratliff', author_email='[email protected]', description='River Avulsion Flooplain Evolution Model', long_description=open('README.rst').read(), url='https://github.com/katmratliff/avulsion-bmi', license='MIT', packages=find_packages(), cmdclass=get_cmdclass(pymt_components), entry_points=get_entry_points(pymt_components), )
# ... existing code ... from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages from model_metadata.utils import get_cmdclass, get_entry_points pymt_components = [ ( "BmiRiverModule=rafem:BmiRiverModule", ".bmi", ) ] setup(name='rafem', # ... modified code ... url='https://github.com/katmratliff/avulsion-bmi', license='MIT', packages=find_packages(), cmdclass=get_cmdclass(pymt_components), entry_points=get_entry_points(pymt_components), ) # ... rest of the code ...
4d1ab55f2bbe8041421002a91dc4f58783913591
services/search_indexes.py
services/search_indexes.py
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, ))
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
Implement language-specific aspects of indexing
Implement language-specific aspects of indexing
Python
bsd-3-clause
theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo
python
## Code Before: from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, )) ## Instruction: Implement language-specific aspects of indexing ## Code After: from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
... index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, )) ...
865b9d8307f35203d7242e9c431ec2f6cb65c42e
whyis/manager.py
whyis/manager.py
import flask_script as script

from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module


class Manager(script.Manager):
    def __init__(self):
        script.Manager.__init__(self, app_factory)
        config = import_config_module()
        self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
        self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)

        self.add_command("configure", commands.Configure())
        self.add_command("createuser", commands.CreateUser())
        self.add_command("listroutes", commands.ListRoutes())
        self.add_command("interpret", commands.RunInterpreter())
        self.add_command("load", commands.LoadNanopub())
        self.add_command("retire", commands.RetireNanopub())
        self.add_command("runserver", commands.WhyisServer())
        self.add_command("test", commands.Test())
        self.add_command("testagent", commands.TestAgent())
        self.add_command("updateuser", commands.UpdateUser())
        self.add_command("uninstallapp", commands.UninstallApp())
import flask_script as script

from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module


class Manager(script.Manager):
    def __init__(self):
        script.Manager.__init__(self, app_factory)
        config = import_config_module()
        self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
        self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)

        self.add_command("configure", commands.Configure())
        self.add_command("createuser", commands.CreateUser())
        self.add_command("interpret", commands.ListRoutes())
        self.add_command("list_routes", commands.ListRoutes())
        self.add_command("load", commands.LoadNanopub())
        self.add_command("retire", commands.RetireNanopub())
        self.add_command("runserver", commands.WhyisServer())
        self.add_command("test", commands.Test())
        self.add_command("test_agent", commands.TestAgent())
        self.add_command("updateuser", commands.UpdateUser())
        self.add_command("uninstall_app", commands.UninstallApp())
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced." This reverts commit 7827598d1060442570685e94633093c550ce7ff2.
Python
apache-2.0
tetherless-world/graphene,tetherless-world/graphene,tetherless-world/satoru,tetherless-world/satoru,tetherless-world/satoru,tetherless-world/graphene,tetherless-world/satoru,tetherless-world/graphene
python
## Code Before: import flask_script as script

from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module


class Manager(script.Manager):
    def __init__(self):
        script.Manager.__init__(self, app_factory)
        config = import_config_module()
        self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
        self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)

        self.add_command("configure", commands.Configure())
        self.add_command("createuser", commands.CreateUser())
        self.add_command("listroutes", commands.ListRoutes())
        self.add_command("interpret", commands.RunInterpreter())
        self.add_command("load", commands.LoadNanopub())
        self.add_command("retire", commands.RetireNanopub())
        self.add_command("runserver", commands.WhyisServer())
        self.add_command("test", commands.Test())
        self.add_command("testagent", commands.TestAgent())
        self.add_command("updateuser", commands.UpdateUser())
        self.add_command("uninstallapp", commands.UninstallApp())

## Instruction: Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.

## Code After: import flask_script as script

from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module


class Manager(script.Manager):
    def __init__(self):
        script.Manager.__init__(self, app_factory)
        config = import_config_module()
        self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
        self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)

        self.add_command("configure", commands.Configure())
        self.add_command("createuser", commands.CreateUser())
        self.add_command("interpret", commands.ListRoutes())
        self.add_command("list_routes", commands.ListRoutes())
        self.add_command("load", commands.LoadNanopub())
        self.add_command("retire", commands.RetireNanopub())
        self.add_command("runserver", commands.WhyisServer())
        self.add_command("test", commands.Test())
        self.add_command("test_agent", commands.TestAgent())
        self.add_command("updateuser", commands.UpdateUser())
        self.add_command("uninstall_app", commands.UninstallApp())
...
        self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)

        self.add_command("configure", commands.Configure())
        self.add_command("createuser", commands.CreateUser())
        self.add_command("interpret", commands.ListRoutes())
        self.add_command("list_routes", commands.ListRoutes())
        self.add_command("load", commands.LoadNanopub())
        self.add_command("retire", commands.RetireNanopub())
        self.add_command("runserver", commands.WhyisServer())
        self.add_command("test", commands.Test())
        self.add_command("test_agent", commands.TestAgent())
        self.add_command("updateuser", commands.UpdateUser())
        self.add_command("uninstall_app", commands.UninstallApp())
...
0223b6fc332bdbc8a641832ebd06b79969b65853
pyfibot/modules/module_btc.py
pyfibot/modules/module_btc.py
from __future__ import unicode_literals, print_function, division


def command_btc(bot, user, channel, args):
    """Display BTC exchange rates"""
    r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json")
    data = r.json()
    eur_rate = float(data['EUR']['24h'])
    usd_rate = float(data['USD']['24h'])
    return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate))
from __future__ import unicode_literals, print_function, division


def command_btc(bot, user, channel, args):
    """Display current BTC exchange rates from mtgox"""
    r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker")
    btcusd = r.json()['return']['avg']['display_short']
    r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker")
    btceur = r.json()['return']['avg']['display_short']
    return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
Use mtgox as data source
Use mtgox as data source
Python
bsd-3-clause
rnyberg/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,huqa/pyfibot,aapa/pyfibot,huqa/pyfibot,rnyberg/pyfibot,EArmour/pyfibot,lepinkainen/pyfibot,aapa/pyfibot
python
## Code Before: from __future__ import unicode_literals, print_function, division


def command_btc(bot, user, channel, args):
    """Display BTC exchange rates"""
    r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json")
    data = r.json()
    eur_rate = float(data['EUR']['24h'])
    usd_rate = float(data['USD']['24h'])
    return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate))

## Instruction: Use mtgox as data source

## Code After: from __future__ import unicode_literals, print_function, division


def command_btc(bot, user, channel, args):
    """Display current BTC exchange rates from mtgox"""
    r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker")
    btcusd = r.json()['return']['avg']['display_short']
    r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker")
    btceur = r.json()['return']['avg']['display_short']
    return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
// ... existing code ...
def command_btc(bot, user, channel, args):
    """Display current BTC exchange rates from mtgox"""
    r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker")
    btcusd = r.json()['return']['avg']['display_short']
    r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker")
    btceur = r.json()['return']['avg']['display_short']
    return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
// ... rest of the code ...
ac4692e213f7090d10dc565c6b787b6c02f76aaf
src/test/java/com/github/tomakehurst/wiremock/admin/ResponseDefinitionBodyMatcherTest.java
src/test/java/com/github/tomakehurst/wiremock/admin/ResponseDefinitionBodyMatcherTest.java
package com.github.tomakehurst.wiremock.admin;

import com.github.tomakehurst.wiremock.admin.model.ResponseDefinitionBodyMatcher;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import org.junit.Test;

import static com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder.responseDefinition;
import static junit.framework.TestCase.assertFalse;

public class ResponseDefinitionBodyMatcherTest {
    @Test
    public void doesNotMatchEmptyBody() {
        ResponseDefinition emptyBody = responseDefinition().build();

        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 0);
        assertFalse(matcher.match(emptyBody).isExactMatch());
    }
}
package com.github.tomakehurst.wiremock.admin;

import com.github.tomakehurst.wiremock.admin.model.ResponseDefinitionBodyMatcher;
import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import org.junit.Test;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class ResponseDefinitionBodyMatcherTest {
    @Test
    public void noThresholds() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 0);
        assertFalse(matcher.match(new ResponseDefinition()).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("a")).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
    }

    @Test
    public void textBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(2, 0);
        assertFalse(matcher.match(textResponseDefinition("f")).isExactMatch());
        assertFalse(matcher.match(textResponseDefinition("fo")).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("foo")).isExactMatch());
    }

    @Test
    public void binaryBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 2);
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc })).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc, 0xf })).isExactMatch());
    }

    private static ResponseDefinition textResponseDefinition(String body) {
        return new ResponseDefinitionBuilder()
            .withHeader("Content-Type", "text/plain")
            .withBody(body)
            .build();
    }

    private static ResponseDefinition binaryResponseDefinition(byte[] body) {
        return new ResponseDefinitionBuilder()
            .withBody(body)
            .build();
    }
}
Add more tests for ResponseDefinitionBodyMatcher
Add more tests for ResponseDefinitionBodyMatcher
Java
apache-2.0
Mahoney/wiremock,Mahoney/wiremock,dlaha21/wiremock,tomakehurst/wiremock,dlaha21/wiremock,tomakehurst/wiremock,Mahoney/wiremock,tricker/wiremock,dlaha21/wiremock,dlaha21/wiremock,Mahoney/wiremock,tomakehurst/wiremock,tricker/wiremock,Mahoney/wiremock,tomakehurst/wiremock,tricker/wiremock,tricker/wiremock,tomakehurst/wiremock,tricker/wiremock,dlaha21/wiremock
java
## Code Before: package com.github.tomakehurst.wiremock.admin;

import com.github.tomakehurst.wiremock.admin.model.ResponseDefinitionBodyMatcher;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import org.junit.Test;

import static com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder.responseDefinition;
import static junit.framework.TestCase.assertFalse;

public class ResponseDefinitionBodyMatcherTest {
    @Test
    public void doesNotMatchEmptyBody() {
        ResponseDefinition emptyBody = responseDefinition().build();

        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 0);
        assertFalse(matcher.match(emptyBody).isExactMatch());
    }
}

## Instruction: Add more tests for ResponseDefinitionBodyMatcher

## Code After: package com.github.tomakehurst.wiremock.admin;

import com.github.tomakehurst.wiremock.admin.model.ResponseDefinitionBodyMatcher;
import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import org.junit.Test;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class ResponseDefinitionBodyMatcherTest {
    @Test
    public void noThresholds() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 0);
        assertFalse(matcher.match(new ResponseDefinition()).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("a")).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
    }

    @Test
    public void textBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(2, 0);
        assertFalse(matcher.match(textResponseDefinition("f")).isExactMatch());
        assertFalse(matcher.match(textResponseDefinition("fo")).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("foo")).isExactMatch());
    }

    @Test
    public void binaryBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 2);
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc })).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc, 0xf })).isExactMatch());
    }

    private static ResponseDefinition textResponseDefinition(String body) {
        return new ResponseDefinitionBuilder()
            .withHeader("Content-Type", "text/plain")
            .withBody(body)
            .build();
    }

    private static ResponseDefinition binaryResponseDefinition(byte[] body) {
        return new ResponseDefinitionBuilder()
            .withBody(body)
            .build();
    }
}
...
package com.github.tomakehurst.wiremock.admin;

import com.github.tomakehurst.wiremock.admin.model.ResponseDefinitionBodyMatcher;
import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import org.junit.Test;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class ResponseDefinitionBodyMatcherTest {
    @Test
    public void noThresholds() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 0);
        assertFalse(matcher.match(new ResponseDefinition()).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("a")).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
    }

    @Test
    public void textBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(2, 0);
        assertFalse(matcher.match(textResponseDefinition("f")).isExactMatch());
        assertFalse(matcher.match(textResponseDefinition("fo")).isExactMatch());
        assertTrue(matcher.match(textResponseDefinition("foo")).isExactMatch());
    }

    @Test
    public void binaryBodyMatchingWithThreshold() {
        ResponseDefinitionBodyMatcher matcher = new ResponseDefinitionBodyMatcher(0, 2);
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1 })).isExactMatch());
        assertFalse(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc })).isExactMatch());
        assertTrue(matcher.match(binaryResponseDefinition(new byte[] { 0x1, 0xc, 0xf })).isExactMatch());
    }

    private static ResponseDefinition textResponseDefinition(String body) {
        return new ResponseDefinitionBuilder()
            .withHeader("Content-Type", "text/plain")
            .withBody(body)
            .build();
    }

    private static ResponseDefinition binaryResponseDefinition(byte[] body) {
        return new ResponseDefinitionBuilder()
            .withBody(body)
            .build();
    }
}
...
991b4861d9ac80ec99dfb8cb700a4342331220ea
src/main/java/com/laytonsmith/abstraction/enums/MCSpawnReason.java
src/main/java/com/laytonsmith/abstraction/enums/MCSpawnReason.java
package com.laytonsmith.abstraction.enums;

import com.laytonsmith.annotations.MEnum;

@MEnum("com.commandhelper.SpawnReason")
public enum MCSpawnReason {
    BREEDING,
    BUILD_IRONGOLEM,
    BUILD_SNOWMAN,
    BUILD_WITHER,
    /**
     * Deprecated as of 1.14, no longer used.
     */
    CHUNK_GEN,
    /**
     * Spawned by plugins
     */
    CUSTOM,
    /**
     * Missing spawn reason
     */
    DEFAULT,
    /**
     * The kind of egg you throw
     */
    EGG,
    JOCKEY,
    LIGHTNING,
    NATURAL,
    REINFORCEMENTS,
    SHOULDER_ENTITY,
    SLIME_SPLIT,
    SPAWNER,
    SPAWNER_EGG,
    VILLAGE_DEFENSE,
    VILLAGE_INVASION,
    NETHER_PORTAL,
    DISPENSE_EGG,
    INFECTION,
    CURED,
    OCELOT_BABY,
    SILVERFISH_BLOCK,
    MOUNT,
    TRAP,
    ENDER_PEARL,
    DROWNED,
    SHEARED,
    EXPLOSION
}
package com.laytonsmith.abstraction.enums;

import com.laytonsmith.annotations.MEnum;

@MEnum("com.commandhelper.SpawnReason")
public enum MCSpawnReason {
    BREEDING,
    BUILD_IRONGOLEM,
    BUILD_SNOWMAN,
    BUILD_WITHER,
    /**
     * Deprecated as of 1.14, no longer used.
     */
    CHUNK_GEN,
    /**
     * Spawned by plugins
     */
    CUSTOM,
    /**
     * Missing spawn reason
     */
    DEFAULT,
    /**
     * The kind of egg you throw
     */
    EGG,
    JOCKEY,
    LIGHTNING,
    NATURAL,
    PATROL,
    RAID,
    REINFORCEMENTS,
    SHOULDER_ENTITY,
    SLIME_SPLIT,
    SPAWNER,
    SPAWNER_EGG,
    VILLAGE_DEFENSE,
    VILLAGE_INVASION,
    NETHER_PORTAL,
    DISPENSE_EGG,
    INFECTION,
    CURED,
    OCELOT_BABY,
    SILVERFISH_BLOCK,
    MOUNT,
    TRAP,
    ENDER_PEARL,
    DROWNED,
    SHEARED,
    EXPLOSION
}
Add PATROL and RAID entity spawn reasons
Add PATROL and RAID entity spawn reasons
Java
mit
sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper
java
## Code Before: package com.laytonsmith.abstraction.enums;

import com.laytonsmith.annotations.MEnum;

@MEnum("com.commandhelper.SpawnReason")
public enum MCSpawnReason {
    BREEDING,
    BUILD_IRONGOLEM,
    BUILD_SNOWMAN,
    BUILD_WITHER,
    /**
     * Deprecated as of 1.14, no longer used.
     */
    CHUNK_GEN,
    /**
     * Spawned by plugins
     */
    CUSTOM,
    /**
     * Missing spawn reason
     */
    DEFAULT,
    /**
     * The kind of egg you throw
     */
    EGG,
    JOCKEY,
    LIGHTNING,
    NATURAL,
    REINFORCEMENTS,
    SHOULDER_ENTITY,
    SLIME_SPLIT,
    SPAWNER,
    SPAWNER_EGG,
    VILLAGE_DEFENSE,
    VILLAGE_INVASION,
    NETHER_PORTAL,
    DISPENSE_EGG,
    INFECTION,
    CURED,
    OCELOT_BABY,
    SILVERFISH_BLOCK,
    MOUNT,
    TRAP,
    ENDER_PEARL,
    DROWNED,
    SHEARED,
    EXPLOSION
}

## Instruction: Add PATROL and RAID entity spawn reasons

## Code After: package com.laytonsmith.abstraction.enums;

import com.laytonsmith.annotations.MEnum;

@MEnum("com.commandhelper.SpawnReason")
public enum MCSpawnReason {
    BREEDING,
    BUILD_IRONGOLEM,
    BUILD_SNOWMAN,
    BUILD_WITHER,
    /**
     * Deprecated as of 1.14, no longer used.
     */
    CHUNK_GEN,
    /**
     * Spawned by plugins
     */
    CUSTOM,
    /**
     * Missing spawn reason
     */
    DEFAULT,
    /**
     * The kind of egg you throw
     */
    EGG,
    JOCKEY,
    LIGHTNING,
    NATURAL,
    PATROL,
    RAID,
    REINFORCEMENTS,
    SHOULDER_ENTITY,
    SLIME_SPLIT,
    SPAWNER,
    SPAWNER_EGG,
    VILLAGE_DEFENSE,
    VILLAGE_INVASION,
    NETHER_PORTAL,
    DISPENSE_EGG,
    INFECTION,
    CURED,
    OCELOT_BABY,
    SILVERFISH_BLOCK,
    MOUNT,
    TRAP,
    ENDER_PEARL,
    DROWNED,
    SHEARED,
    EXPLOSION
}
...
    JOCKEY,
    LIGHTNING,
    NATURAL,
    PATROL,
    RAID,
    REINFORCEMENTS,
    SHOULDER_ENTITY,
    SLIME_SPLIT,
...
6cd9b0c731839a75cd8e8bd2ab1e5d2f2687c96a
shirka/responders/__init__.py
shirka/responders/__init__.py
class Responder(object):
    def support(message):
        pass

    def generate(message):
        pass

    def on_start(self, consumer):
        return False

    def support(self, request):
        return request.content[0:len(self.name())] == self.name()


class Response(object):
    def __init__(self, content):
        self.content = content
        self.tags = []
        self.command = ""

    def __str__(self):
        return self.content


class StreamResponse(Response):
    def __init__(self, content):
        self.is_completed = False
        self.content = content

    def stop(self):
        self.is_completed = True

    def handle(self, request, consumer):
        self.is_completed = True

    def __str__(self):
        return "<StreamResponse>"


from rageface import RagefaceResponder
from flowdock import FlowdockWhoisResponder
from math import MathResponder
from wat import WatResponder
from xkcd import XkcdResponder
from bigbro import BigbroResponder
from ascii import AsciiResponder
from ninegag import NineGagResponder
from link import LinkResponder
from reminder import ReminderResponder
from status import StatusResponder
from help import HelpResponder
from remote import RemoteResponder
from monitor import MonitorResponder
from process import ProcessResponder
from so import SoResponder
from jira_responder import JiraResponder
from graphite import GraphiteResponder
class Responder(object):
    def support(message):
        pass

    def generate(message):
        pass

    def on_start(self, consumer):
        return False

    def support(self, request):
        return request.content[0:len(self.name())] == self.name()


class Response(object):
    def __init__(self, content):
        self.content = content
        self.tags = []
        self.command = ""

    def __str__(self):
        return self.content


class StreamResponse(Response):
    def __init__(self, content):
        self.is_completed = False
        self.content = content

    def stop(self):
        self.is_completed = True

    def handle(self, request, consumer):
        self.is_completed = True

    def __str__(self):
        return "<StreamResponse>"


from rageface import RagefaceResponder
from flowdock import FlowdockWhoisResponder
from math import MathResponder
from wat import WatResponder
from xkcd import XkcdResponder
from bigbro import BigbroResponder
from ascii import AsciiResponder
from ninegag import NineGagResponder
from link import LinkResponder
from reminder import ReminderResponder
from status import StatusResponder
from help import HelpResponder
from remote import RemoteResponder
from monitor import MonitorResponder
from process import ProcessResponder
from so import SoResponder
from jira_responder import JiraResponder
Remove import for graphite responder
Remove import for graphite responder
Python
mit
rande/python-shirka,rande/python-shirka
python
## Code Before: class Responder(object):
    def support(message):
        pass

    def generate(message):
        pass

    def on_start(self, consumer):
        return False

    def support(self, request):
        return request.content[0:len(self.name())] == self.name()


class Response(object):
    def __init__(self, content):
        self.content = content
        self.tags = []
        self.command = ""

    def __str__(self):
        return self.content


class StreamResponse(Response):
    def __init__(self, content):
        self.is_completed = False
        self.content = content

    def stop(self):
        self.is_completed = True

    def handle(self, request, consumer):
        self.is_completed = True

    def __str__(self):
        return "<StreamResponse>"


from rageface import RagefaceResponder
from flowdock import FlowdockWhoisResponder
from math import MathResponder
from wat import WatResponder
from xkcd import XkcdResponder
from bigbro import BigbroResponder
from ascii import AsciiResponder
from ninegag import NineGagResponder
from link import LinkResponder
from reminder import ReminderResponder
from status import StatusResponder
from help import HelpResponder
from remote import RemoteResponder
from monitor import MonitorResponder
from process import ProcessResponder
from so import SoResponder
from jira_responder import JiraResponder
from graphite import GraphiteResponder

## Instruction: Remove import for graphite responder

## Code After: class Responder(object):
    def support(message):
        pass

    def generate(message):
        pass

    def on_start(self, consumer):
        return False

    def support(self, request):
        return request.content[0:len(self.name())] == self.name()


class Response(object):
    def __init__(self, content):
        self.content = content
        self.tags = []
        self.command = ""

    def __str__(self):
        return self.content


class StreamResponse(Response):
    def __init__(self, content):
        self.is_completed = False
        self.content = content

    def stop(self):
        self.is_completed = True

    def handle(self, request, consumer):
        self.is_completed = True

    def __str__(self):
        return "<StreamResponse>"


from rageface import RagefaceResponder
from flowdock import FlowdockWhoisResponder
from math import MathResponder
from wat import WatResponder
from xkcd import XkcdResponder
from bigbro import BigbroResponder
from ascii import AsciiResponder
from ninegag import NineGagResponder
from link import LinkResponder
from reminder import ReminderResponder
from status import StatusResponder
from help import HelpResponder
from remote import RemoteResponder
from monitor import MonitorResponder
from process import ProcessResponder
from so import SoResponder
from jira_responder import JiraResponder
# ... existing code ...
from process import ProcessResponder
from so import SoResponder
from jira_responder import JiraResponder
# ... rest of the code ...
35397c33f1b52f158c11941e17211eb699836003
tests/integration/indexer-test.py
tests/integration/indexer-test.py
from nose import tools as nose
import unittest

from shiva.app import app
from shiva.indexer import Indexer


class IndexerTestCase(unittest.TestCase):
    def test_main(self):
        with app.app_context():
            lola = Indexer(app.config)

            nose.eq_(lola.run(), None)
from nose import tools as nose
import unittest

from shiva.app import app, db
from shiva.indexer import Indexer


class IndexerTestCase(unittest.TestCase):
    def setUp(self):
        db.create_all()

    def test_main(self):
        with app.app_context():
            app.config['MEDIA_DIRS'] = []
            lola = Indexer(app.config)

            nose.eq_(lola.run(), None)

    def tearDown(self):
        db.drop_all()
Fix to indexer integration tests
Fix to indexer integration tests
Python
mit
tooxie/shiva-server,maurodelazeri/shiva-server,tooxie/shiva-server,maurodelazeri/shiva-server
python
## Code Before: from nose import tools as nose
import unittest

from shiva.app import app
from shiva.indexer import Indexer


class IndexerTestCase(unittest.TestCase):
    def test_main(self):
        with app.app_context():
            lola = Indexer(app.config)

            nose.eq_(lola.run(), None)

## Instruction: Fix to indexer integration tests

## Code After: from nose import tools as nose
import unittest

from shiva.app import app, db
from shiva.indexer import Indexer


class IndexerTestCase(unittest.TestCase):
    def setUp(self):
        db.create_all()

    def test_main(self):
        with app.app_context():
            app.config['MEDIA_DIRS'] = []
            lola = Indexer(app.config)

            nose.eq_(lola.run(), None)

    def tearDown(self):
        db.drop_all()
...
from nose import tools as nose
import unittest

from shiva.app import app, db
from shiva.indexer import Indexer


class IndexerTestCase(unittest.TestCase):
    def setUp(self):
        db.create_all()

    def test_main(self):
        with app.app_context():
            app.config['MEDIA_DIRS'] = []
            lola = Indexer(app.config)

            nose.eq_(lola.run(), None)

    def tearDown(self):
        db.drop_all()
...
36bd8bf151d28c9063f5356176bb7007ee822565
src/main/java/org/col/api/Reference.java
src/main/java/org/col/api/Reference.java
package org.col.api;

/**
 * Simplified literature reference class for proof of concept only.
 */
public class Reference {

    private Integer key;
    private Object csl;
    private Serial serial;
    private Integer year;
    private String doi;

}
package org.col.api;

import java.util.Objects;

/**
 * Simplified literature reference class for proof of concept only.
 */
public class Reference {

    private Integer key;
    private Object csl;
    private Serial serial;
    private Integer year;
    private String doi;

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Reference reference = (Reference) o;
        return Objects.equals(key, reference.key)
            && Objects.equals(csl, reference.csl)
            && Objects.equals(serial, reference.serial)
            && Objects.equals(year, reference.year)
            && Objects.equals(doi, reference.doi);
    }

    @Override
    public int hashCode() {
        return Objects.hash(key, csl, serial, year, doi);
    }

}
Add reference hash & equals methods
Add reference hash & equals methods
Java
apache-2.0
Sp2000/colplus-backend
java
## Code Before: package org.col.api;

/**
 * Simplified literature reference class for proof of concept only.
 */
public class Reference {

    private Integer key;
    private Object csl;
    private Serial serial;
    private Integer year;
    private String doi;

}

## Instruction: Add reference hash & equals methods

## Code After: package org.col.api;

import java.util.Objects;

/**
 * Simplified literature reference class for proof of concept only.
 */
public class Reference {

    private Integer key;
    private Object csl;
    private Serial serial;
    private Integer year;
    private String doi;

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Reference reference = (Reference) o;
        return Objects.equals(key, reference.key)
            && Objects.equals(csl, reference.csl)
            && Objects.equals(serial, reference.serial)
            && Objects.equals(year, reference.year)
            && Objects.equals(doi, reference.doi);
    }

    @Override
    public int hashCode() {
        return Objects.hash(key, csl, serial, year, doi);
    }

}
# ... existing code ...
package org.col.api;

import java.util.Objects;

/**
 * Simplified literature reference class for proof of concept
# ... modified code ...
    private Integer year;
    private String doi;

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Reference reference = (Reference) o;
        return Objects.equals(key, reference.key)
            && Objects.equals(csl, reference.csl)
            && Objects.equals(serial, reference.serial)
            && Objects.equals(year, reference.year)
            && Objects.equals(doi, reference.doi);
    }

    @Override
    public int hashCode() {
        return Objects.hash(key, csl, serial, year, doi);
    }

}
# ... rest of the code ...
8348f46fb78b55c5d2bcd6401f4041e8890072db
gviewer/keys/vim.py
gviewer/keys/vim.py
from collections import OrderedDict


keys = OrderedDict([
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl b", "page up")]
)
from collections import OrderedDict


keys = OrderedDict([
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl d", "page down"),
    ("ctrl b", "page up"),
    ("ctrl u", "page up")]
)
Add ctrl+d/ctrl+u for page down and page up
Add ctrl+d/ctrl+u for page down and page up
Python
mit
chhsiao90/gviewer
python
## Code Before: from collections import OrderedDict


keys = OrderedDict([
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl b", "page up")]
)

## Instruction: Add ctrl+d/ctrl+u for page down and page up

## Code After: from collections import OrderedDict


keys = OrderedDict([
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl d", "page down"),
    ("ctrl b", "page up"),
    ("ctrl u", "page up")]
)
# ... existing code ...
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl d", "page down"),
    ("ctrl b", "page up"),
    ("ctrl u", "page up")]
)
# ... rest of the code ...
d3837972d5aff2812ea534e053695373497192d5
cheroot/__init__.py
cheroot/__init__.py
try:
    import pkg_resources
    __version__ = pkg_resources.get_distribution('cheroot').version
except ImportError:
    __version__ = 'unknown'
try:
    import pkg_resources
    __version__ = pkg_resources.get_distribution('cheroot').version
except (ImportError, pkg_resources.DistributionNotFound):
    __version__ = 'unknown'
Handle DistributionNotFound when getting version
Handle DistributionNotFound when getting version

When frozen with e.g. cx_Freeze, cheroot will be importable, but not discoverable by pkg_resources.
Python
bsd-3-clause
cherrypy/cheroot
python
## Code Before: try:
    import pkg_resources
    __version__ = pkg_resources.get_distribution('cheroot').version
except ImportError:
    __version__ = 'unknown'

## Instruction: Handle DistributionNotFound when getting version
When frozen with e.g. cx_Freeze, cheroot will be importable, but not discoverable by pkg_resources.

## Code After: try:
    import pkg_resources
    __version__ = pkg_resources.get_distribution('cheroot').version
except (ImportError, pkg_resources.DistributionNotFound):
    __version__ = 'unknown'
...
try:
    import pkg_resources
    __version__ = pkg_resources.get_distribution('cheroot').version
except (ImportError, pkg_resources.DistributionNotFound):
    __version__ = 'unknown'
...