| Column | Type | Statistics |
|---|---|---|
| `commit` | stringlengths | min 40, max 40 |
| `old_file` | stringlengths | min 4, max 106 |
| `new_file` | stringlengths | min 4, max 106 |
| `old_contents` | stringlengths | min 10, max 2.94k |
| `new_contents` | stringlengths | min 21, max 2.95k |
| `subject` | stringlengths | min 16, max 444 |
| `message` | stringlengths | min 17, max 2.63k |
| `lang` | stringclasses | 1 value |
| `license` | stringclasses | 13 values |
| `repos` | stringlengths | min 7, max 43k |
| `ndiff` | stringlengths | min 52, max 3.31k |
| `instruction` | stringlengths | min 16, max 444 |
| `content` | stringlengths | min 133, max 4.32k |
| `diff` | stringlengths | min 49, max 3.61k |
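Each record pairs a file's contents before and after a commit with that commit's subject and message, so a row can serve directly as an instruction-plus-edit example. As a rough sketch of how the columns above might be consumed, the snippet below loads the dataset with the Hugging Face `datasets` library and reads a few fields from one record; the dataset path `user/commit-edits` is a placeholder for illustration, not the real repository id.

```python
# Minimal sketch: loading a commit-edit dataset and inspecting one record.
# The path "user/commit-edits" is a hypothetical placeholder; substitute the
# actual repository id for this dataset.
from datasets import load_dataset

ds = load_dataset("user/commit-edits", split="train")  # hypothetical path

row = ds[0]
print(row["commit"])        # 40-character commit hash
print(row["old_file"])      # path of the file before the change
print(row["subject"])       # one-line commit subject used as the instruction
print(row["old_contents"])  # full file contents before the commit
print(row["new_contents"])  # full file contents after the commit
```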
23e984fe24428241b873b93a4ca541b69d3345d2
nipy/labs/viz_tools/test/test_cm.py
nipy/labs/viz_tools/test/test_cm.py
from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function if hasattr(pl.cm, 'gnuplot'): # gnuplot is only in recent version of MPL replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
Fix tests on old MPL
BUG: Fix tests on old MPL Old MPL do not have function-defined colormaps, so the corresponding code path cannot be tested.
Python
bsd-3-clause
alexis-roche/nipy,nipy/nipy-labs,arokem/nipy,arokem/nipy,alexis-roche/nipy,alexis-roche/nireg,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/register,arokem/nipy,nipy/nireg,nipy/nireg,bthirion/nipy,alexis-roche/nireg,alexis-roche/niseg,arokem/nipy,alexis-roche/register,bthirion/nipy,nipy/nipy-labs
from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function + if hasattr(pl.cm, 'gnuplot'): + # gnuplot is only in recent version of MPL - replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8) + replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
Fix tests on old MPL
## Code Before: from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8) ## Instruction: Fix tests on old MPL ## Code After: from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function if hasattr(pl.cm, 'gnuplot'): # gnuplot is only in recent version of MPL replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
from nose import SkipTest try: import matplotlib as mp # Make really sure that we don't try to open an Xserver connection. mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') except ImportError: raise SkipTest('Could not import matplotlib') from ..cm import dim_cmap, replace_inside def test_dim_cmap(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl dim_cmap(pl.cm.jet) def test_replace_inside(): # This is only a smoke test mp.use('svg', warn=False) import pylab as pl pl.switch_backend('svg') replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) # We also test with gnuplot, which is defined using function + if hasattr(pl.cm, 'gnuplot'): + # gnuplot is only in recent version of MPL - replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8) + replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8) ? ++++
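The `ndiff` and `diff` columns in the record above look like the output of Python's `difflib.ndiff`: lines prefixed with `-` come from the old file, `+` from the new file, and the `diff` column additionally keeps the `?` guide lines that point at changed characters within a line. The sketch below regenerates a diff in that style from a pair of before/after strings; whether the dataset was actually produced this way is an assumption, since the generation pipeline is not documented here.

```python
# Sketch: reproducing a diff in the style of the `diff` column with difflib.ndiff.
# Assumption: the dataset's diff fields were generated in this style.
import difflib

old_contents = "def card(person_or_id, detailed=False, small=False):\n"
new_contents = "def card(person_or_id, **kwargs):\n"

diff_lines = difflib.ndiff(old_contents.splitlines(keepends=True),
                           new_contents.splitlines(keepends=True))
print("".join(diff_lines))
# "-" lines come from the old text, "+" lines from the new text,
# and "? " lines mark the characters that changed within a line.
```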
e8935189659e882f534f5605086dc76ce7ce881b
rdrf/rdrf/admin.py
rdrf/rdrf/admin.py
from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin)
from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() def has_add_permission(self, request): if request.user.is_superuser: return True return False admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin)
Disable adding registries for non-superusers
Disable adding registries for non-superusers
Python
agpl-3.0
muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf
from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() + def has_add_permission(self, request): + if request.user.is_superuser: + return True + return False + admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin)
Disable adding registries for non-superusers
## Code Before: from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin) ## Instruction: Disable adding registries for non-superusers ## Code After: from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() def has_add_permission(self, request): if request.user.is_superuser: return True return False admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin)
from django.contrib import admin from models import * from registry.groups.models import User class SectionAdmin(admin.ModelAdmin): list_display = ('code', 'display_name') class RegistryFormAdmin(admin.ModelAdmin): list_display = ('registry', 'name', 'sections') class RegistryAdmin(admin.ModelAdmin): def queryset(self, request): if not request.user.is_superuser: user = User.objects.get(user=request.user) return Registry.objects.filter(registry__in=[reg.id for reg in user.registry.all()]) return Registry.objects.all() + def has_add_permission(self, request): + if request.user.is_superuser: + return True + return False + admin.site.register(CDEPermittedValue) admin.site.register(CDEPermittedValueGroup) admin.site.register(CommonDataElement) admin.site.register(Wizard) admin.site.register(RegistryForm, RegistryFormAdmin) admin.site.register(Section, SectionAdmin) admin.site.register(Registry, RegistryAdmin)
4b340e0712956ea44eace7382dd743890958a0fd
widgets/card.py
widgets/card.py
from flask import render_template from models.person import Person def card(person_or_id, detailed=False, small=False): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
from flask import render_template from models.person import Person def card(person_or_id, **kwargs): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, **kwargs)
Revert "Fix a bug in caching"
Revert "Fix a bug in caching" This reverts commit 2565df456ecb290f620ce4dadca19c76b0eeb1af. Conflicts: widgets/card.py
Python
apache-2.0
teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr
from flask import render_template from models.person import Person - def card(person_or_id, detailed=False, small=False): + def card(person_or_id, **kwargs): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() - return render_template('widgets/card.html', person=person, detailed=detailed, small=small) + return render_template('widgets/card.html', person=person, **kwargs)
Revert "Fix a bug in caching"
## Code Before: from flask import render_template from models.person import Person def card(person_or_id, detailed=False, small=False): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, detailed=detailed, small=small) ## Instruction: Revert "Fix a bug in caching" ## Code After: from flask import render_template from models.person import Person def card(person_or_id, **kwargs): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, **kwargs)
from flask import render_template from models.person import Person - def card(person_or_id, detailed=False, small=False): + def card(person_or_id, **kwargs): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() - return render_template('widgets/card.html', person=person, detailed=detailed, small=small) ? ^^^ ^^^^^^^^^^^^^^^ ---------- + return render_template('widgets/card.html', person=person, **kwargs) ? ^^^^ ^^
5076055b54d18ea2441abaf604a4ea4dd79353c5
cybox/test/objects/__init__.py
cybox/test/objects/__init__.py
import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): # Verify that the correct class has been added to the OBJECTS # dictionary in cybox.utils print(type(self)) if type(self) == type(ObjectTestCase): return t = self.__class__.object_type c = self.__class__.klass self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): # Verify that the correct class has been added to the OBJECT_TYPES_DICT # dictionary in cybox.utils.nsparser # Skip this base class if type(self) == type(ObjectTestCase): return t = self.__class__.object_type expected_class = cybox.utils.get_class_for_object_type(t) actual_class = self.__class__.klass self.assertEqual(expected_class, actual_class) expected_namespace = expected_class._XSI_NS actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix') self.assertEqual(expected_namespace, actual_namespace) self.assertEqual(expected_class._XSI_TYPE, t)
Expand default testing on new object types
Expand default testing on new object types
Python
bsd-3-clause
CybOXProject/python-cybox
import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): - # Verify that the correct class has been added to the OBJECTS + # Verify that the correct class has been added to the OBJECT_TYPES_DICT - # dictionary in cybox.utils + # dictionary in cybox.utils.nsparser - print(type(self)) + + # Skip this base class if type(self) == type(ObjectTestCase): return + t = self.__class__.object_type - c = self.__class__.klass - self.assertEqual(cybox.utils.get_class_for_object_type(t), c) + expected_class = cybox.utils.get_class_for_object_type(t) + actual_class = self.__class__.klass + + self.assertEqual(expected_class, actual_class) + + expected_namespace = expected_class._XSI_NS + actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix') + self.assertEqual(expected_namespace, actual_namespace) + + self.assertEqual(expected_class._XSI_TYPE, t) +
Expand default testing on new object types
## Code Before: import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): # Verify that the correct class has been added to the OBJECTS # dictionary in cybox.utils print(type(self)) if type(self) == type(ObjectTestCase): return t = self.__class__.object_type c = self.__class__.klass self.assertEqual(cybox.utils.get_class_for_object_type(t), c) ## Instruction: Expand default testing on new object types ## Code After: import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): # Verify that the correct class has been added to the OBJECT_TYPES_DICT # dictionary in cybox.utils.nsparser # Skip this base class if type(self) == type(ObjectTestCase): return t = self.__class__.object_type expected_class = cybox.utils.get_class_for_object_type(t) actual_class = self.__class__.klass self.assertEqual(expected_class, actual_class) expected_namespace = expected_class._XSI_NS actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix') self.assertEqual(expected_namespace, actual_namespace) self.assertEqual(expected_class._XSI_TYPE, t)
import cybox.utils class ObjectTestCase(object): """A base class for testing all subclasses of ObjectProperties. Each subclass of ObjectTestCase should subclass both unittest.TestCase and ObjectTestCase, and defined two class-level fields: - klass: the ObjectProperties subclass being tested - object_type: The name prefix used in the XML Schema bindings for the object. """ def test_type_exists(self): - # Verify that the correct class has been added to the OBJECTS + # Verify that the correct class has been added to the OBJECT_TYPES_DICT ? +++++ +++++ - # dictionary in cybox.utils + # dictionary in cybox.utils.nsparser ? +++++++++ - print(type(self)) + + # Skip this base class if type(self) == type(ObjectTestCase): return + t = self.__class__.object_type + + expected_class = cybox.utils.get_class_for_object_type(t) - c = self.__class__.klass + actual_class = self.__class__.klass ? + ++++++++++ - self.assertEqual(cybox.utils.get_class_for_object_type(t), c) + + self.assertEqual(expected_class, actual_class) + + expected_namespace = expected_class._XSI_NS + actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix') + self.assertEqual(expected_namespace, actual_namespace) + + self.assertEqual(expected_class._XSI_TYPE, t)
a4264c610f33640ac773ca0b12912f3ad972d966
feedback/admin.py
feedback/admin.py
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin)
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
Add Admin action to feedbacks
Add Admin action to feedbacks
Python
mit
n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] + actions = ['to_archive'] + + def to_archive(self, request, queryset): + queryset.update(archive=True) + to_archive.short_description = "Markierte Einträge archivieren" + admin.site.register(Feedback, FeedbackAdmin)
Add Admin action to feedbacks
## Code Before: from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin) ## Instruction: Add Admin action to feedbacks ## Code After: from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] + actions = ['to_archive'] + + def to_archive(self, request, queryset): + queryset.update(archive=True) + to_archive.short_description = "Markierte Einträge archivieren" + admin.site.register(Feedback, FeedbackAdmin)
e284968b7e234b30a8a593b298a96b78bb151c03
pyscf/pbc/tdscf/rhf_slow.py
pyscf/pbc/tdscf/rhf_slow.py
from pyscf.tdscf.rhf_slow import *
# This module is simply an alias of the molecular code from pyscf.tdscf.rhf_slow import *
Extend a docstring in PBC-Gamma TDHF
Extend a docstring in PBC-Gamma TDHF
Python
apache-2.0
gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf
+ + # This module is simply an alias of the molecular code from pyscf.tdscf.rhf_slow import *
Extend a docstring in PBC-Gamma TDHF
## Code Before: from pyscf.tdscf.rhf_slow import * ## Instruction: Extend a docstring in PBC-Gamma TDHF ## Code After: # This module is simply an alias of the molecular code from pyscf.tdscf.rhf_slow import *
+ + # This module is simply an alias of the molecular code from pyscf.tdscf.rhf_slow import *
ee7147e6d781a92d0ded0e094cc01a187fcb64ae
openstates/people.py
openstates/people.py
from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new def scrape(self, apikey=None): if apikey: self.apikey = apikey if not self.apikey: print('apikey not set') return # TODO: change this to just get ids, then scrape legislator can take an id # and get the data it it leaving behind here method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id'])
from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new def scrape(self): method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id'])
Move the APIKey bits out to the init
Move the APIKey bits out to the init
Python
bsd-3-clause
openstates/billy,openstates/billy,sunlightlabs/billy,openstates/billy,sunlightlabs/billy,sunlightlabs/billy
from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new - def scrape(self, apikey=None): + def scrape(self): - if apikey: - self.apikey = apikey - if not self.apikey: - print('apikey not set') - return - - # TODO: change this to just get ids, then scrape legislator can take an id - # and get the data it it leaving behind here method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id'])
Move the APIKey bits out to the init
## Code Before: from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new def scrape(self, apikey=None): if apikey: self.apikey = apikey if not self.apikey: print('apikey not set') return # TODO: change this to just get ids, then scrape legislator can take an id # and get the data it it leaving behind here method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id']) ## Instruction: Move the APIKey bits out to the init ## Code After: from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new def scrape(self): method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id'])
from pupa.scrape import Legislator from .base import OpenstatesBaseScraper class OpenstatesPersonScraper(OpenstatesBaseScraper): def scrape_legislator(self, legislator_id): old = self.api('legislators/' + legislator_id + '?') old.pop('country', None) old.pop('level', None) new = Legislator(name=old['full_name'], image=old['photo_url']) return new - def scrape(self, apikey=None): ? ------------- + def scrape(self): - if apikey: - self.apikey = apikey - if not self.apikey: - print('apikey not set') - return - - # TODO: change this to just get ids, then scrape legislator can take an id - # and get the data it it leaving behind here method = 'legislators/?state={}&fields=id'.format(self.state) for result in self.api(method): yield self.scrape_legislator(result['id'])
701402c4a51474b244ff28dd2d5c9a0731440308
mozcal/events/api.py
mozcal/events/api.py
from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all()
from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all() filtering = { "title": ('startswith',), }
Allow filtering of event by title
Allow filtering of event by title
Python
bsd-3-clause
ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents
from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all() - + filtering = { + "title": ('startswith',), + }
Allow filtering of event by title
## Code Before: from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all() ## Instruction: Allow filtering of event by title ## Code After: from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all() filtering = { "title": ('startswith',), }
from tastypie.resources import ModelResource from models import Event class EventResource(ModelResource): class Meta: queryset = Event.objects.all() + filtering = { + "title": ('startswith',), + }
fb91bf1e7c1677124f4aa1ce9c534fb437145980
pygametemplate/helper.py
pygametemplate/helper.py
"""Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): raise ValueError("\"%s\" is too long to be wrapped." % word) lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), [])
"""Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): raise ValueError(f"'{word}' is too long to be wrapped.") lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), [])
Replace % with f-string :)
Replace % with f-string :)
Python
mit
AndyDeany/pygame-template
"""Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): - raise ValueError("\"%s\" is too long to be wrapped." % word) + raise ValueError(f"'{word}' is too long to be wrapped.") lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), [])
Replace % with f-string :)
## Code Before: """Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): raise ValueError("\"%s\" is too long to be wrapped." % word) lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), []) ## Instruction: Replace % with f-string :) ## Code After: """Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): raise ValueError(f"'{word}' is too long to be wrapped.") lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), [])
"""Module containing helper functions for using pygame.""" def load_class_assets(calling_object, assets_dict): """Load class assets. Only call if class_assets_loaded is False.""" calling_class = type(calling_object) for attribute_name in assets_dict: setattr(calling_class, attribute_name, assets_dict[attribute_name]) setattr(calling_class, "class_assets_loaded", True) def wrap_text(text, font, max_width): """ Returns an array of lines which can be blitted beneath each other in the given font in a box of the given maximum width. """ def wrap_paragraph(paragraph): """Wraps text that doesn't contain newlines.""" def too_long(string): return font.size(string)[0] > max_width def raise_word_too_long_error(word): - raise ValueError("\"%s\" is too long to be wrapped." % word) ? ^^^^^^ ------- + raise ValueError(f"'{word}' is too long to be wrapped.") ? + ^^^^^^^^ lines = [] words = paragraph.split() line = words.pop(0) if too_long(line): raise_word_too_long_error(line) for word in words: if too_long(word): raise_word_too_long_error(word) if too_long(" ".join((line, word))): lines.append(line) line = word else: line = " ".join((line, word)) lines.append(line) return lines paragraphs = text.split("\n") return sum(map(wrap_paragraph, paragraphs), [])
c326becad43949999d151cd1e10fcb75f9d2b148
lib/constants.py
lib/constants.py
SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 JSON_PUBSUB_PORT = 15596
SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 HTTPS_PORT = 443 JSON_PUBSUB_PORT = 15596
Add missing constant for ssl listener.
Add missing constant for ssl listener.
Python
apache-2.0
MediaMath/qasino,MediaMath/qasino
SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 + HTTPS_PORT = 443 JSON_PUBSUB_PORT = 15596
Add missing constant for ssl listener.
## Code Before: SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 JSON_PUBSUB_PORT = 15596 ## Instruction: Add missing constant for ssl listener. ## Code After: SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 HTTPS_PORT = 443 JSON_PUBSUB_PORT = 15596
SQL_PORT = 15000 JSON_RPC_PORT = 15598 HTTP_PORT = 15597 + HTTPS_PORT = 443 JSON_PUBSUB_PORT = 15596
0ef346389b680e81ab618d4d782239640c1926f5
tests/test_collection.py
tests/test_collection.py
import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2 assert str(children[0].id) == str(child1.id) assert str(children[1].id) == str(child2.id)
import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2
Remove unnecessary test of collection children
Remove unnecessary test of collection children
Python
agpl-3.0
UMD-DRASTIC/drastic
import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2 + - assert str(children[0].id) == str(child1.id) - assert str(children[1].id) == str(child2.id)
Remove unnecessary test of collection children
## Code Before: import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2 assert str(children[0].id) == str(child1.id) assert str(children[1].id) == str(child2.id) ## Instruction: Remove unnecessary test of collection children ## Code After: import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2
import unittest from indigo.models import Collection from indigo.models.errors import UniqueException from nose.tools import raises class NodeTest(unittest.TestCase): def test_a_create_root(self): Collection.create(name="test_root", parent=None, path="/") coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.path == '/' assert coll.parent is None # Make sure this is the root collection root = Collection.get_root_collection() assert root.id == coll.id def test_create_with_children(self): coll = Collection.find("test_root") assert coll.name == "test_root" assert coll.is_root child1 = Collection.create(name="child1", parent=str(coll.id)) child2 = Collection.create(name="child2", parent=str(coll.id)) assert child1.get_parent_collection().id == coll.id assert child2.get_parent_collection().id == coll.id assert child1.path == '/child1/' assert child2.path == '/child2/' children = coll.get_child_collections() assert len(children) == 2 assert coll.get_child_collection_count() == 2 - assert str(children[0].id) == str(child1.id) - assert str(children[1].id) == str(child2.id)
981d0473a24d52fb19e8da1a2af18c9f8823dd29
heufybot/factory.py
heufybot/factory.py
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) self.bot.moduleHandler.runGenericAction("disconnect", connector.host) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
Add an action for server disconnects
Add an action for server disconnects
Python
mit
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) + self.bot.moduleHandler.runGenericAction("disconnect", connector.host) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
Add an action for server disconnects
## Code Before: from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason) ## Instruction: Add an action for server disconnects ## Code After: from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) self.bot.moduleHandler.runGenericAction("disconnect", connector.host) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory from heufybot.connection import HeufyBotConnection class HeufyBotFactory(ReconnectingClientFactory): protocol = HeufyBotConnection def __init__(self, bot): self.bot = bot self.currentlyDisconnecting = [] def buildProtocol(self, addr): self.resetDelay() return self.protocol(self.bot) def clientConnectionFailed(self, connector, reason): self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).", connector=connector, reason=reason) ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) def clientConnectionLost(self, connector, reason): # Disable modules if connector.host in self.bot.moduleHandler.enabledModules: for module in self.bot.moduleHandler.enabledModules[connector.host]: self.bot.moduleHandler.disableModule(module, connector.host, True) + self.bot.moduleHandler.runGenericAction("disconnect", connector.host) del self.bot.servers[connector.host] # Check whether or not we should reconnect if connector.host in self.currentlyDisconnecting: self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector) ClientFactory.clientConnectionLost(self, connector, reason) self.currentlyDisconnecting.remove(connector.host) self.bot.countConnections() else: ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
e47ede85f2001cc5c514951355ded1253b4c45f7
notaro/apps.py
notaro/apps.py
from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") watson.register(DocumentModel.objects.all(), exclude=('doc'))
from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") watson.register(DocumentModel.objects.all(), exclude=('doc', 'image')) PictureModel = self.get_model("Picture") watson.register(PictureModel.objects.all(), exclude=('image'))
Fix watson settings; add search for picture
Fix watson settings; add search for picture
Python
bsd-3-clause
ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio
from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") - watson.register(DocumentModel.objects.all(), exclude=('doc')) + watson.register(DocumentModel.objects.all(), exclude=('doc', 'image')) + PictureModel = self.get_model("Picture") + watson.register(PictureModel.objects.all(), exclude=('image')) +
Fix watson settings; add search for picture
## Code Before: from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") watson.register(DocumentModel.objects.all(), exclude=('doc')) ## Instruction: Fix watson settings; add search for picture ## Code After: from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") watson.register(DocumentModel.objects.all(), exclude=('doc', 'image')) PictureModel = self.get_model("Picture") watson.register(PictureModel.objects.all(), exclude=('image'))
from django.apps import AppConfig from watson import search as watson class NotaroConfig(AppConfig): name = "notaro" verbose_name = "Notizen" def ready(self): NoteModel = self.get_model("Note") watson.register(NoteModel.objects.filter(published=True)) SourceModel = self.get_model("Source") watson.register(SourceModel.objects.all()) DocumentModel = self.get_model("Document") - watson.register(DocumentModel.objects.all(), exclude=('doc')) + watson.register(DocumentModel.objects.all(), exclude=('doc', 'image')) ? +++++++++ + + PictureModel = self.get_model("Picture") + watson.register(PictureModel.objects.all(), exclude=('image'))
05adb44cdec74256fa44ce3a3df61c6525ce7fac
dryscrape/xvfb.py
dryscrape/xvfb.py
import atexit import os _xvfb = None def start_xvfb(): from xvfbwrapper import Xvfb global _xvfb if "DISPLAY" in os.environ: del os.environ["DISPLAY"] _xvfb = Xvfb() _xvfb.start() atexit.register(_xvfb.stop) def stop_xvfb(): global _xvfb _xvfb.stop()
import atexit import os _xvfb = None def start_xvfb(): from xvfbwrapper import Xvfb global _xvfb _xvfb = Xvfb() _xvfb.start() atexit.register(_xvfb.stop) def stop_xvfb(): global _xvfb _xvfb.stop()
Remove removal of DISPLAY environment variable
Remove removal of DISPLAY environment variable The issue has to do with the two lines: ` if "DISPLAY" in os.environ: del os.environ["DISPLAY"]` This seems to remove the DISPLAY environment variable unnecessarily, as on line 50 of xvfbwrapper.py, self.orig_display is set to the value of DISPLAY. self.orig_display is checked on line 83, which is where the error occurs. Because of xvfb.py removing the environment variable and self.orig_display being set to the original value, on line 84 when it tries to remove DISPLAY, it has already been removed by xvfb.py, so it throws a KeyError.
Python
mit
niklasb/dryscrape
import atexit import os _xvfb = None + def start_xvfb(): - from xvfbwrapper import Xvfb + from xvfbwrapper import Xvfb - global _xvfb + global _xvfb - if "DISPLAY" in os.environ: - del os.environ["DISPLAY"] - _xvfb = Xvfb() + _xvfb = Xvfb() - _xvfb.start() + _xvfb.start() - atexit.register(_xvfb.stop) + atexit.register(_xvfb.stop) + def stop_xvfb(): - global _xvfb + global _xvfb - _xvfb.stop() + _xvfb.stop()
Remove removal of DISPLAY environment variable
## Code Before: import atexit import os _xvfb = None def start_xvfb(): from xvfbwrapper import Xvfb global _xvfb if "DISPLAY" in os.environ: del os.environ["DISPLAY"] _xvfb = Xvfb() _xvfb.start() atexit.register(_xvfb.stop) def stop_xvfb(): global _xvfb _xvfb.stop() ## Instruction: Remove removal of DISPLAY environment variable ## Code After: import atexit import os _xvfb = None def start_xvfb(): from xvfbwrapper import Xvfb global _xvfb _xvfb = Xvfb() _xvfb.start() atexit.register(_xvfb.stop) def stop_xvfb(): global _xvfb _xvfb.stop()
import atexit import os _xvfb = None + def start_xvfb(): - from xvfbwrapper import Xvfb + from xvfbwrapper import Xvfb ? ++ - global _xvfb + global _xvfb ? ++ - if "DISPLAY" in os.environ: - del os.environ["DISPLAY"] - _xvfb = Xvfb() + _xvfb = Xvfb() ? ++ - _xvfb.start() + _xvfb.start() ? ++ - atexit.register(_xvfb.stop) + atexit.register(_xvfb.stop) ? ++ + def stop_xvfb(): - global _xvfb + global _xvfb ? ++ - _xvfb.stop() + _xvfb.stop() ? ++
8213a758782a7ab6cecc5a986e193f204fe57691
scrapy_gridfsfilespipeline/images.py
scrapy_gridfsfilespipeline/images.py
from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """
from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """ @classmethod def from_settings(cls, settings): store_uri = settings['MONGO_URI'] return cls(store_uri, settings=settings)
Add GridFSImagesPipeline.from_settings to use MONGO_URI
Add GridFSImagesPipeline.from_settings to use MONGO_URI
Python
bsd-2-clause
zahariesergiu/scrapy-gridfsfilespipeline
from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """ + @classmethod + def from_settings(cls, settings): + store_uri = settings['MONGO_URI'] + return cls(store_uri, settings=settings) +
Add GridFSImagesPipeline.from_settings to use MONGO_URI
## Code Before: from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """ ## Instruction: Add GridFSImagesPipeline.from_settings to use MONGO_URI ## Code After: from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """ @classmethod def from_settings(cls, settings): store_uri = settings['MONGO_URI'] return cls(store_uri, settings=settings)
from scrapy.pipelines.images import ImagesPipeline from .files import GridFSFilesPipeline class GridFSImagesPipeline(ImagesPipeline, GridFSFilesPipeline): """ An extension of ImagesPipeline that store files in MongoDB GridFS. Is using a guid to check if the file exists in GridFS and MongoDB ObjectId to reference the file with item. ImagesPipeline was using a single variable 'path' for reference and identification. guid is used in MongoGridFSFilesPipeline because the pipeline needs a unique identifier generated based on file URL. MongoGridFSFilesPipeline is using ObjectId to reference the file because it's the primary key. """ + + @classmethod + def from_settings(cls, settings): + store_uri = settings['MONGO_URI'] + return cls(store_uri, settings=settings)
eab3d891d7b0460223990642251bec4bb377543d
website/addons/github/tests/factories.py
website/addons/github/tests/factories.py
from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc'
from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) display_name = 'abc' class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc'
Include display_name in factory for tests
Include display_name in factory for tests
Python
apache-2.0
leb2dg/osf.io,doublebits/osf.io,DanielSBrown/osf.io,kwierman/osf.io,abought/osf.io,mluo613/osf.io,jnayak1/osf.io,cslzchen/osf.io,aaxelb/osf.io,pattisdr/osf.io,mluke93/osf.io,laurenrevere/osf.io,alexschiller/osf.io,kwierman/osf.io,wearpants/osf.io,acshi/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,chennan47/osf.io,kch8qx/osf.io,mattclark/osf.io,acshi/osf.io,erinspace/osf.io,binoculars/osf.io,sloria/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,adlius/osf.io,mluo613/osf.io,cslzchen/osf.io,chennan47/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,emetsger/osf.io,TomHeatwole/osf.io,emetsger/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,chrisseto/osf.io,kwierman/osf.io,crcresearch/osf.io,chrisseto/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io,abought/osf.io,laurenrevere/osf.io,doublebits/osf.io,caseyrollins/osf.io,erinspace/osf.io,chrisseto/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,cslzchen/osf.io,alexschiller/osf.io,jnayak1/osf.io,emetsger/osf.io,RomanZWang/osf.io,zamattiac/osf.io,alexschiller/osf.io,doublebits/osf.io,mfraezz/osf.io,kwierman/osf.io,alexschiller/osf.io,saradbowman/osf.io,pattisdr/osf.io,Nesiehr/osf.io,mluke93/osf.io,RomanZWang/osf.io,chennan47/osf.io,zachjanicki/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,crcresearch/osf.io,pattisdr/osf.io,abought/osf.io,amyshi188/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,chrisseto/osf.io,hmoco/osf.io,acshi/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,adlius/osf.io,kch8qx/osf.io,mluo613/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,icereval/osf.io,rdhyee/osf.io,leb2dg/osf.io,zamattiac/osf.io,SSJohns/osf.io,icereval/osf.io,amyshi188/osf.io,rdhyee/osf.io,acshi/osf.io,caneruguz/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,mattclark/osf.io,jnayak1/osf.io,TomBaxter/osf.io,binoculars/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,samchrisinger/osf.io,SSJohns/osf.io,aaxelb/osf.io,doublebits/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,sloria/osf.io,DanielSBrown/osf.io,mluke93/osf.io,RomanZWang/osf.io,baylee-d/osf.io,acshi/osf.io,abought/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,adlius/osf.io,caneruguz/osf.io,RomanZWang/osf.io,hmoco/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,wearpants/osf.io,aaxelb/osf.io,wearpants/osf.io,sloria/osf.io,felliott/osf.io,TomBaxter/osf.io,rdhyee/osf.io,adlius/osf.io,Johnetordoff/osf.io,wearpants/osf.io,felliott/osf.io,emetsger/osf.io,mluo613/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,cslzchen/osf.io,icereval/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,binoculars/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,erinspace/osf.io,zamattiac/osf.io,leb2dg/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,mattclark/osf.io,mluke93/osf.io,samchrisinger/osf.io,felliott/osf.io,caneruguz/osf.io,baylee-d/osf.io,cwisecarver/osf.io,amyshi188/osf.io
from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) + display_name = 'abc' class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc'
Include display_name in factory for tests
## Code Before: from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc' ## Instruction: Include display_name in factory for tests ## Code After: from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) display_name = 'abc' class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc'
from factory import Sequence, SubFactory from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory from website.addons.github.model import GitHubNodeSettings, GitHubUserSettings class GitHubAccountFactory(ExternalAccountFactory): provider = 'github' provider_id = Sequence(lambda n: 'id-{0}'.format(n)) oauth_key = Sequence(lambda n: 'key-{0}'.format(n)) + display_name = 'abc' class GitHubUserSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubUserSettings owner = SubFactory(UserFactory) class GitHubNodeSettingsFactory(ModularOdmFactory): FACTORY_FOR = GitHubNodeSettings owner = SubFactory(ProjectFactory) user_settings = SubFactory(GitHubUserSettingsFactory) repo = 'mock' user = 'abc'
52443c468a446638171f45b080dcf62f73e62866
src/wirecloud_fiware/tests/selenium.py
src/wirecloud_fiware/tests/selenium.py
from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): tags = ('current',) def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware')
from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware')
Remove 'current' tag from FiWareSeleniumTestCase
Remove 'current' tag from FiWareSeleniumTestCase
Python
agpl-3.0
rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud
from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): - - tags = ('current',) def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware')
Remove 'current' tag from FiWareSeleniumTestCase
## Code Before: from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): tags = ('current',) def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware') ## Instruction: Remove 'current' tag from FiWareSeleniumTestCase ## Code After: from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware')
from wirecloudcommons.test import WirecloudSeleniumTestCase __test__ = False class FiWareSeleniumTestCase(WirecloudSeleniumTestCase): - - tags = ('current',) def test_add_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') def test_delete_fiware_marketplace(self): self.login() self.add_marketplace('fiware', 'http://localhost:8080', 'fiware') self.delete_marketplace('fiware')
d3b526c5079dc61d3bb8a80363c9448de07da331
fabfile.py
fabfile.py
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate')
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
Make it easy to do a full deploy with fab
Make it easy to do a full deploy with fab
Python
mit
cgourlay/readthedocs.org,sunnyzwh/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,nikolas/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,johncosta/private-readthedocs.org,stevepiercy/readthedocs.org,mrshoki/readthedocs.org,michaelmcandrew/readthedocs.org,royalwang/readthedocs.org,cgourlay/readthedocs.org,tddv/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,rtfd/readthedocs.org,CedarLogic/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,cgourlay/readthedocs.org,Carreau/readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,safwanrahman/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,emawind84/readthedocs.org,mhils/readthedocs.org,SteveViss/readthedocs.org,alex/readthedocs.org,nikolas/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,mhils/readthedocs.org,stevepiercy/readthedocs.org,alex/readthedocs.org,GovReady/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,raven47git/readthedocs.org,KamranMackey/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,jerel/readthedocs.org,ojii/readthedocs.org,alex/readthedocs.org,istresearch/readthedocs.org,Tazer/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,hach-que/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org,Carreau/readthedocs.org,dirn/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,GovReady/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,d0ugal/readthedocs.org,rtfd/readthedocs.org,hach-que/readthedocs.org,nyergler/pythonslides,royalwang/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,kenwang76/readthedocs.org,nyergler/pythonslides,CedarLogic/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,safwanrahman/readthedocs.org,singingwolfboy/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,nyergler/pythonslides,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,sils1297/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,safwanrahman/readthedocs.org,istresearch/
readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,ojii/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,agjohnson/readthedocs.org,titiushko/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,alex/readthedocs.org,hach-que/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,ojii/readthedocs.org,SteveViss/readthedocs.org,mrshoki/readthedocs.org,kenwang76/readthedocs.org,pombredanne/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,VishvajitP/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,singingwolfboy/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,LukasBoersma/readthedocs.org,laplaceliu/readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,attakei/readthedocs-oauth,pombredanne/readthedocs.org,sils1297/readthedocs.org,asampat3090/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' - def update_requirements(): - "Update requirements in the virtualenv." - run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) - def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') - def pull(): - "Pull new code" - with cd(env.code_dir): - run('git pull origin master') + def update_requirements(): + "Update requirements in the virtualenv." + run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) + + def migrate(project=None): + if project: + run('django-admin.py migrate %s' % project) + else: + run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") + def pull(): + "Pull new code" + with cd(env.code_dir): + run('git pull origin master') - def migrate(project=None): - if project: - run('django-admin.py migrate %s' % project) - else: - run('django-admin.py migrate') + def full_deploy(): + push() + update_requirements() + migrate() + restart() + celery() +
Make it easy to do a full deploy with fab
## Code Before: from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') ## Instruction: Make it easy to do a full deploy with fab ## Code After: from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' - def update_requirements(): - "Update requirements in the virtualenv." - run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) - def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') - def pull(): - "Pull new code" - with cd(env.code_dir): - run('git pull origin master') + def update_requirements(): + "Update requirements in the virtualenv." + run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) + + def migrate(project=None): + if project: + run('django-admin.py migrate %s' % project) + else: + run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") - def migrate(project=None): - if project: - run('django-admin.py migrate %s' % project) - else: - run('django-admin.py migrate') + def pull(): + "Pull new code" + with cd(env.code_dir): + run('git pull origin master') + + def full_deploy(): + push() + update_requirements() + migrate() + restart() + celery()
e9cb0bff470dc6bfc926f0b4ac6214ae8a028e61
vcr/files.py
vcr/files.py
import os import yaml from .cassette import Cassette def load_cassette(cassette_path): try: pc = yaml.load(open(cassette_path)) cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: cassette_file.write(yaml.dump(cassette.serialize()))
import os import yaml from .cassette import Cassette # Use the libYAML versions if possible try: from yaml import CLoader as Loader, CDumper as Dumper except ImportError: from yaml import Loader, Dumper def load_cassette(cassette_path): try: pc = yaml.load(open(cassette_path), Loader=Loader) cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
Use the libYAML version of yaml if it's available
Use the libYAML version of yaml if it's available
Python
mit
ByteInternet/vcrpy,aclevy/vcrpy,ByteInternet/vcrpy,kevin1024/vcrpy,poussik/vcrpy,bcen/vcrpy,yarikoptic/vcrpy,agriffis/vcrpy,graingert/vcrpy,poussik/vcrpy,gwillem/vcrpy,mgeisler/vcrpy,kevin1024/vcrpy,IvanMalison/vcrpy,graingert/vcrpy
import os import yaml from .cassette import Cassette + # Use the libYAML versions if possible + try: + from yaml import CLoader as Loader, CDumper as Dumper + except ImportError: + from yaml import Loader, Dumper + def load_cassette(cassette_path): try: - pc = yaml.load(open(cassette_path)) + pc = yaml.load(open(cassette_path), Loader=Loader) cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: - cassette_file.write(yaml.dump(cassette.serialize())) + cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
Use the libYAML version of yaml if it's available
## Code Before: import os import yaml from .cassette import Cassette def load_cassette(cassette_path): try: pc = yaml.load(open(cassette_path)) cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: cassette_file.write(yaml.dump(cassette.serialize())) ## Instruction: Use the libYAML version of yaml if it's available ## Code After: import os import yaml from .cassette import Cassette # Use the libYAML versions if possible try: from yaml import CLoader as Loader, CDumper as Dumper except ImportError: from yaml import Loader, Dumper def load_cassette(cassette_path): try: pc = yaml.load(open(cassette_path), Loader=Loader) cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
import os import yaml from .cassette import Cassette + # Use the libYAML versions if possible + try: + from yaml import CLoader as Loader, CDumper as Dumper + except ImportError: + from yaml import Loader, Dumper + def load_cassette(cassette_path): try: - pc = yaml.load(open(cassette_path)) + pc = yaml.load(open(cassette_path), Loader=Loader) ? +++++++++++++++ cassette = Cassette(pc) return cassette except IOError: return None def save_cassette(cassette_path, cassette): dirname, filename = os.path.split(cassette_path) if not os.path.exists(dirname): os.makedirs(dirname) with open(cassette_path, 'a') as cassette_file: - cassette_file.write(yaml.dump(cassette.serialize())) + cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper)) ? +++++++++++++++
94e68ff420ecb07ad830e213b38863bf34b7f85c
autocomplete_light/urls.py
autocomplete_light/urls.py
from django import VERSION from .views import AutocompleteView, RegistryView try: from django.conf.urls import patterns, url except ImportError: # Django < 1.5 from django.conf.urls.defaults import patterns, url urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns)
from django import VERSION from .views import AutocompleteView, RegistryView if VERSION > (1, 9): from django.conf.urls import url else: try: from django.conf.urls import patterns, url except ImportError: # Django < 1.5 from django.conf.urls.defaults import patterns, url urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns)
Fix Django 1.9 import error
Fix Django 1.9 import error
Python
mit
shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,shubhamdipt/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,Perkville/django-autocomplete-light,Eraldo/django-autocomplete-light,luzfcb/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,shubhamdipt/django-autocomplete-light,yourlabs/django-autocomplete-light,Eraldo/django-autocomplete-light,Eraldo/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,Eraldo/django-autocomplete-light,luzfcb/django-autocomplete-light,Perkville/django-autocomplete-light
from django import VERSION from .views import AutocompleteView, RegistryView - try: + if VERSION > (1, 9): + from django.conf.urls import url + else: + try: - from django.conf.urls import patterns, url + from django.conf.urls import patterns, url - except ImportError: + except ImportError: - # Django < 1.5 + # Django < 1.5 - from django.conf.urls.defaults import patterns, url + from django.conf.urls.defaults import patterns, url urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns)
Fix Django 1.9 import error
## Code Before: from django import VERSION from .views import AutocompleteView, RegistryView try: from django.conf.urls import patterns, url except ImportError: # Django < 1.5 from django.conf.urls.defaults import patterns, url urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns) ## Instruction: Fix Django 1.9 import error ## Code After: from django import VERSION from .views import AutocompleteView, RegistryView if VERSION > (1, 9): from django.conf.urls import url else: try: from django.conf.urls import patterns, url except ImportError: # Django < 1.5 from django.conf.urls.defaults import patterns, url urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns)
from django import VERSION from .views import AutocompleteView, RegistryView - try: + if VERSION > (1, 9): + from django.conf.urls import url + else: + try: - from django.conf.urls import patterns, url + from django.conf.urls import patterns, url ? ++++ - except ImportError: + except ImportError: ? ++++ - # Django < 1.5 + # Django < 1.5 ? ++++ - from django.conf.urls.defaults import patterns, url + from django.conf.urls.defaults import patterns, url ? ++++ urlpatterns = [ url(r'^(?P<autocomplete>[-\w]+)/$', AutocompleteView.as_view(), name='autocomplete_light_autocomplete' ), url(r'^$', RegistryView.as_view(), name='autocomplete_light_registry' ), ] if VERSION < (1, 9): urlpatterns = patterns('', *urlpatterns)
ac173dd3eace738b705ad5924aa830d3c3dffcf6
Instanssi/admin_screenshow/forms.py
Instanssi/admin_screenshow/forms.py
from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo')
from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os class IRCMessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(IRCMessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'IRC-Viesti', 'nick', 'date', 'message', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = IRCMessage fields = ('nick','message','date') class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo')
Add form for irc messages.
admin_screenshow: Add form for irc messages.
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os + + class IRCMessageForm(forms.ModelForm): + def __init__(self, *args, **kwargs): + super(IRCMessageForm, self).__init__(*args, **kwargs) + self.helper = FormHelper() + self.helper.layout = Layout( + Fieldset( + u'IRC-Viesti', + 'nick', + 'date', + 'message', + ButtonHolder ( + Submit('submit', u'Tallenna') + ) + ) + ) + + class Meta: + model = IRCMessage + fields = ('nick','message','date') class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo')
Add form for irc messages.
## Code Before: from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo') ## Instruction: Add form for irc messages. ## Code After: from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os class IRCMessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(IRCMessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'IRC-Viesti', 'nick', 'date', 'message', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = IRCMessage fields = ('nick','message','date') class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo')
from django import forms from django.core.exceptions import ValidationError from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.screenshow.models import Sponsor,Message,IRCMessage import os + + class IRCMessageForm(forms.ModelForm): + def __init__(self, *args, **kwargs): + super(IRCMessageForm, self).__init__(*args, **kwargs) + self.helper = FormHelper() + self.helper.layout = Layout( + Fieldset( + u'IRC-Viesti', + 'nick', + 'date', + 'message', + ButtonHolder ( + Submit('submit', u'Tallenna') + ) + ) + ) + + class Meta: + model = IRCMessage + fields = ('nick','message','date') class MessageForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MessageForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Viesti', 'show_start', 'show_end', 'text', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Message fields = ('show_start','show_end','text') class SponsorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SponsorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Sponsori', 'name', 'logo', ButtonHolder ( Submit('submit', u'Tallenna') ) ) ) class Meta: model = Sponsor fields = ('name','logo')
a670b598f4416b0e99acd7442e5a51295a5daaa3
tests/test_utils.py
tests/test_utils.py
import os import time import unittest from helpers.utils import sigchld_handler, sigterm_handler, sleep def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) def test_sigchld_handler(self): sigchld_handler(None, None) os.waitpid = os_waitpid sigchld_handler(None, None) def test_sleep(self): time.sleep = time_sleep sleep(0.01)
import os import time import unittest from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) def test_reap_children(self): reap_children() os.waitpid = os_waitpid sigchld_handler(None, None) reap_children() def test_sleep(self): time.sleep = time_sleep sleep(0.01)
Implement unit test for reap_children function
Implement unit test for reap_children function
Python
mit
jinty/patroni,sean-/patroni,jinty/patroni,pgexperts/patroni,sean-/patroni,zalando/patroni,pgexperts/patroni,zalando/patroni
import os import time import unittest - from helpers.utils import sigchld_handler, sigterm_handler, sleep + from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) - def test_sigchld_handler(self): - sigchld_handler(None, None) + def test_reap_children(self): + reap_children() os.waitpid = os_waitpid sigchld_handler(None, None) + reap_children() def test_sleep(self): time.sleep = time_sleep sleep(0.01)
Implement unit test for reap_children function
## Code Before: import os import time import unittest from helpers.utils import sigchld_handler, sigterm_handler, sleep def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) def test_sigchld_handler(self): sigchld_handler(None, None) os.waitpid = os_waitpid sigchld_handler(None, None) def test_sleep(self): time.sleep = time_sleep sleep(0.01) ## Instruction: Implement unit test for reap_children function ## Code After: import os import time import unittest from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) def test_reap_children(self): reap_children() os.waitpid = os_waitpid sigchld_handler(None, None) reap_children() def test_sleep(self): time.sleep = time_sleep sleep(0.01)
import os import time import unittest - from helpers.utils import sigchld_handler, sigterm_handler, sleep + from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep ? +++++++++++++++ def nop(*args, **kwargs): pass def os_waitpid(a, b): return (0, 0) def time_sleep(_): sigchld_handler(None, None) class TestUtils(unittest.TestCase): def __init__(self, method_name='runTest'): self.setUp = self.set_up self.tearDown = self.tear_down super(TestUtils, self).__init__(method_name) def set_up(self): self.time_sleep = time.sleep time.sleep = nop def tear_down(self): time.sleep = self.time_sleep def test_sigterm_handler(self): self.assertRaises(SystemExit, sigterm_handler, None, None) - def test_sigchld_handler(self): - sigchld_handler(None, None) + def test_reap_children(self): + reap_children() os.waitpid = os_waitpid sigchld_handler(None, None) + reap_children() def test_sleep(self): time.sleep = time_sleep sleep(0.01)
550fedc513aab5feec3aaf43a49df5082a1e5dda
incuna_test_utils/testcases/urls.py
incuna_test_utils/testcases/urls.py
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLsMixinBase(object): """A TestCase Mixin with a check_url helper method for testing urls""" def check_url(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the correct view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) self.assertViewNames(view, expected_url) def assertViewNames(self, view, expected_url): """ Assert that the view method/class that the URL resolves to is the correct one. """ raise NotImplementedError class URLsMixinForViewMethod(URLsMixinBase): """For testing method-based views.""" def assertViewNames(self, view_method, expected_url): resolved_view_method = resolve(expected_url).func self.assertEqual(resolved_view_method.__name__, view_method.__name__) class URLsMixinREST(URLsMixinBase): """For testing class-based views.""" def assertViewNames(self, view_class, expected_url): resolved_view_class = resolve(expected_url).func.cls self.assertEqual(resolved_view_class, view_class) class URLsMixin(URLsMixinREST): """For backwards compatibility.""" def __init__(self, *args, **kwargs): warnings.warn( 'URLsMixin is deprecated; use URLsMixinREST instead.', DeprecationWarning) super(URLsMixin, self).__init__(*args, **kwargs) class URLsTestCase(URLsMixin, TestCase): """For backwards compatibility. Deprecated in v0.6.""" class URLsTestCaseREST(URLsMixinREST, TestCase): """Tests class-based REST Framework views.""" class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase): """Tests (non-REST) views defined by view methods."""
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLTestMixin(object): def assert_url_matches_view(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the expected view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) resolved_view = resolve(expected_url).func if hasattr(view, 'cls'): self.assertEqual(resolved_view.cls, view) else: self.assertEqual(resolved_view.__name__, view.__name__) class URLTestCase(URLTestMixin, TestCase): pass
Add simple URLTestMixin and URLTestCase classes
Add simple URLTestMixin and URLTestCase classes * Remove old mixins and testcases
Python
bsd-2-clause
incuna/incuna-test-utils,incuna/incuna-test-utils
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase - class URLsMixinBase(object): + class URLTestMixin(object): - """A TestCase Mixin with a check_url helper method for testing urls""" - - def check_url(self, view, expected_url, url_name, + def assert_url_matches_view(self, view, expected_url, url_name, - url_args=None, url_kwargs=None): + url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. - Check the expected_url resolves to the correct view. + Check the expected_url resolves to the expected view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) - self.assertViewNames(view, expected_url) + resolved_view = resolve(expected_url).func + if hasattr(view, 'cls'): + self.assertEqual(resolved_view.cls, view) + else: + self.assertEqual(resolved_view.__name__, view.__name__) - def assertViewNames(self, view, expected_url): - """ - Assert that the view method/class that the URL resolves to is the - correct one. - """ - raise NotImplementedError - class URLsMixinForViewMethod(URLsMixinBase): - """For testing method-based views.""" + class URLTestCase(URLTestMixin, TestCase): + pass - def assertViewNames(self, view_method, expected_url): - resolved_view_method = resolve(expected_url).func - self.assertEqual(resolved_view_method.__name__, view_method.__name__) - - - class URLsMixinREST(URLsMixinBase): - """For testing class-based views.""" - - def assertViewNames(self, view_class, expected_url): - resolved_view_class = resolve(expected_url).func.cls - self.assertEqual(resolved_view_class, view_class) - - - class URLsMixin(URLsMixinREST): - """For backwards compatibility.""" - def __init__(self, *args, **kwargs): - warnings.warn( - 'URLsMixin is deprecated; use URLsMixinREST instead.', - DeprecationWarning) - super(URLsMixin, self).__init__(*args, **kwargs) - - - class URLsTestCase(URLsMixin, TestCase): - """For backwards compatibility. Deprecated in v0.6.""" - - - class URLsTestCaseREST(URLsMixinREST, TestCase): - """Tests class-based REST Framework views.""" - - - class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase): - """Tests (non-REST) views defined by view methods.""" -
Add simple URLTestMixin and URLTestCase classes
## Code Before: import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLsMixinBase(object): """A TestCase Mixin with a check_url helper method for testing urls""" def check_url(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the correct view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) self.assertViewNames(view, expected_url) def assertViewNames(self, view, expected_url): """ Assert that the view method/class that the URL resolves to is the correct one. """ raise NotImplementedError class URLsMixinForViewMethod(URLsMixinBase): """For testing method-based views.""" def assertViewNames(self, view_method, expected_url): resolved_view_method = resolve(expected_url).func self.assertEqual(resolved_view_method.__name__, view_method.__name__) class URLsMixinREST(URLsMixinBase): """For testing class-based views.""" def assertViewNames(self, view_class, expected_url): resolved_view_class = resolve(expected_url).func.cls self.assertEqual(resolved_view_class, view_class) class URLsMixin(URLsMixinREST): """For backwards compatibility.""" def __init__(self, *args, **kwargs): warnings.warn( 'URLsMixin is deprecated; use URLsMixinREST instead.', DeprecationWarning) super(URLsMixin, self).__init__(*args, **kwargs) class URLsTestCase(URLsMixin, TestCase): """For backwards compatibility. Deprecated in v0.6.""" class URLsTestCaseREST(URLsMixinREST, TestCase): """Tests class-based REST Framework views.""" class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase): """Tests (non-REST) views defined by view methods.""" ## Instruction: Add simple URLTestMixin and URLTestCase classes ## Code After: import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLTestMixin(object): def assert_url_matches_view(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the expected view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) resolved_view = resolve(expected_url).func if hasattr(view, 'cls'): self.assertEqual(resolved_view.cls, view) else: self.assertEqual(resolved_view.__name__, view.__name__) class URLTestCase(URLTestMixin, TestCase): pass
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase - class URLsMixinBase(object): ? ---- + class URLTestMixin(object): ? ++ + - """A TestCase Mixin with a check_url helper method for testing urls""" - - def check_url(self, view, expected_url, url_name, ? ^^ ^^ + def assert_url_matches_view(self, view, expected_url, url_name, ? ^^^ ^^ +++++++++++++ - url_args=None, url_kwargs=None): + url_args=None, url_kwargs=None): ? ++++++++++++++ """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. - Check the expected_url resolves to the correct view. ? ^^^^ + Check the expected_url resolves to the expected view. ? ^^^ ++ """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) - self.assertViewNames(view, expected_url) + resolved_view = resolve(expected_url).func + if hasattr(view, 'cls'): + self.assertEqual(resolved_view.cls, view) + else: + self.assertEqual(resolved_view.__name__, view.__name__) - def assertViewNames(self, view, expected_url): - """ - Assert that the view method/class that the URL resolves to is the - correct one. - """ - raise NotImplementedError - class URLsMixinForViewMethod(URLsMixinBase): - """For testing method-based views.""" - - def assertViewNames(self, view_method, expected_url): - resolved_view_method = resolve(expected_url).func - self.assertEqual(resolved_view_method.__name__, view_method.__name__) - - - class URLsMixinREST(URLsMixinBase): - """For testing class-based views.""" - - def assertViewNames(self, view_class, expected_url): - resolved_view_class = resolve(expected_url).func.cls - self.assertEqual(resolved_view_class, view_class) - - - class URLsMixin(URLsMixinREST): - """For backwards compatibility.""" - def __init__(self, *args, **kwargs): - warnings.warn( - 'URLsMixin is deprecated; use URLsMixinREST instead.', - DeprecationWarning) - super(URLsMixin, self).__init__(*args, **kwargs) - - - class URLsTestCase(URLsMixin, TestCase): ? - + class URLTestCase(URLTestMixin, TestCase): ? ++ + + pass - """For backwards compatibility. Deprecated in v0.6.""" - - - class URLsTestCaseREST(URLsMixinREST, TestCase): - """Tests class-based REST Framework views.""" - - - class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase): - """Tests (non-REST) views defined by view methods."""
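A stand-alone sketch of the dispatch idea behind `assert_url_matches_view` in the record above: the callable returned by resolve() either carries a `cls` attribute (class-based REST framework views wrapped by as_view()) or is the plain view function itself. `views_match`, `FakeResolvedView` and `profile_view` below are invented purely for illustration, not part of the package.

def views_match(resolved, expected):
    if hasattr(resolved, 'cls'):
        return resolved.cls is expected
    return resolved.__name__ == expected.__name__


class FakeResolvedView:
    """Stands in for the wrapper object a REST framework as_view() returns."""


def profile_view(request):
    return None


wrapped = FakeResolvedView()
wrapped.cls = FakeResolvedView           # the as_view() wrapper exposes the class

print(views_match(wrapped, FakeResolvedView))    # True
print(views_match(profile_view, profile_view))   # True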
d05fdd1ed6657894ecc624777762b463a3ea69da
tests/basics/fun_name.py
tests/basics/fun_name.py
def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass
def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass # name of a function that has closed over variables def outer(): x = 1 def inner(): return x return inner print(outer.__name__)
Add test for getting name of func with closed over locals.
tests/basics: Add test for getting name of func with closed over locals. Tests correct decoding of the prelude to get the function name.
Python
mit
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass + # name of a function that has closed over variables + def outer(): + x = 1 + def inner(): + return x + return inner + print(outer.__name__) +
Add test for getting name of func with closed over locals.
## Code Before: def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass ## Instruction: Add test for getting name of func with closed over locals. ## Code After: def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass # name of a function that has closed over variables def outer(): x = 1 def inner(): return x return inner print(outer.__name__)
def Fun(): pass class A: def __init__(self): pass def Fun(self): pass try: print(Fun.__name__) print(A.__init__.__name__) print(A.Fun.__name__) print(A().Fun.__name__) except AttributeError: print('SKIP') raise SystemExit # __name__ of a bound native method is not implemented in uPy # the test here is to make sure it doesn't crash try: str((1).to_bytes.__name__) except AttributeError: pass + + # name of a function that has closed over variables + def outer(): + x = 1 + def inner(): + return x + return inner + print(outer.__name__)
b50b7143185131a81e84f0659ff6405317f7d36f
resolwe/flow/execution_engines/base.py
resolwe/flow/execution_engines/base.py
"""Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """
"""Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """ return {}
Return empty dictionary instead of None
Return empty dictionary instead of None
Python
apache-2.0
genialis/resolwe,genialis/resolwe
"""Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """ + return {}
Return empty dictionary instead of None
## Code Before: """Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """ ## Instruction: Return empty dictionary instead of None ## Code After: """Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """ return {}
"""Workflow execution engines.""" from resolwe.flow.engine import BaseEngine class BaseExecutionEngine(BaseEngine): """A workflow execution engine.""" def evaluate(self, data): """Return the code needed to compute a given Data object.""" raise NotImplementedError def get_expression_engine(self, name): """Return an expression engine by its name.""" return self.manager.get_expression_engine(name) def get_output_schema(self, process): """Return any additional output schema for the process.""" return [] def discover_process(self, path): """Perform process discovery in given path. This method will be called during process registration and should return a list of dictionaries with discovered process schemas. """ return [] def prepare_runtime(self, runtime_dir, data): """Prepare runtime directory. This method should return a dictionary of volume maps, where keys are files or directories relative the the runtime directory and values are paths under which these should be made available to the executing program. All volumes will be read-only. """ + return {}
0a5e2134fda46269626b6fac367a28218734b256
conf_site/accounts/tests/__init__.py
conf_site/accounts/tests/__init__.py
from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save()
from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() def _become_staff(self): """Make this testcase's user a staff user.""" self.user.is_staff = True self.user.is_superuser = False self.user.save() def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save()
Add `_become_staff` method to AccountsTestCase.
Add `_become_staff` method to AccountsTestCase.
Python
mit
pydata/conf_site,pydata/conf_site,pydata/conf_site
from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() + def _become_staff(self): + """Make this testcase's user a staff user.""" + self.user.is_staff = True + self.user.is_superuser = False + self.user.save() + def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save()
Add `_become_staff` method to AccountsTestCase.
## Code Before: from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save() ## Instruction: Add `_become_staff` method to AccountsTestCase. ## Code After: from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() def _become_staff(self): """Make this testcase's user a staff user.""" self.user.is_staff = True self.user.is_superuser = False self.user.save() def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save()
from factory import fuzzy from django.contrib.auth import get_user_model from django.test import TestCase class AccountsTestCase(TestCase): def setUp(self): super(AccountsTestCase, self).setUp() self.password = fuzzy.FuzzyText(length=16) self.new_password = fuzzy.FuzzyText(length=16) user_model = get_user_model() self.user = user_model.objects.get_or_create( username="test", email="[email protected]", first_name="Test", last_name="User", )[0] self.user.set_password(self.password) self.user.save() + def _become_staff(self): + """Make this testcase's user a staff user.""" + self.user.is_staff = True + self.user.is_superuser = False + self.user.save() + def _become_superuser(self): """Make this testcase's user a superuser.""" self.user.is_superuser = True self.user.save()
0e1bdcb4e6d2404bb832ab86ec7bf526c1c90bbb
teami18n/teami18n/models.py
teami18n/teami18n/models.py
from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts")
from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) def __unicode__(self): return self.code class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts") def __unicode__(self): return self.title
Add nice name for working in the shell
Add nice name for working in the shell
Python
mit
team-i18n/hackaway,team-i18n/hackaway,team-i18n/hackaway
from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) + + def __unicode__(self): + return self.code class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts") + def __unicode__(self): + return self.title +
Add nice name for working in the shell
## Code Before: from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts") ## Instruction: Add nice name for working in the shell ## Code After: from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) def __unicode__(self): return self.code class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts") def __unicode__(self): return self.title
from django.db import models from django_countries import countries class Country(models.Model): code = models.CharField(max_length=2, choices=tuple(countries), unique=True) + + def __unicode__(self): + return self.code class Podcast(models.Model): story_id = models.CharField(max_length=16, unique=True) link = models.URLField() title = models.TextField() teaser = models.TextField() program_name = models.TextField(blank=True) show_date = models.DateTimeField(null=True, blank=True) image_link = models.URLField(null=True, blank=True) countries = models.ManyToManyField(Country, related_name="podcasts") + + def __unicode__(self): + return self.title
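The record above wires up the Python 2-era __unicode__ hook; a minimal sketch of the Python 3 counterpart, written on plain classes so it runs without Django, looks like this (modern Django calls __str__ instead):

class Country:
    def __init__(self, code):
        self.code = code

    def __str__(self):
        return self.code


class Podcast:
    def __init__(self, title):
        self.title = title

    def __str__(self):
        return self.title


print(Country('CH'))            # CH
print(Podcast('Episode one'))   # Episode one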
e2ca99c9f3548fa0d4e46bdd3b309ebd0e658ffa
test/backend/wayland/conftest.py
test/backend/wayland/conftest.py
import contextlib import os from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" raise NotImplementedError def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" raise NotImplementedError
import contextlib import os import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" self.manager.c.eval(textwrap.dedent(""" self.core._focus_by_click() self.core._process_cursor_button(1, True) """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" success, result = self.manager.c.eval(textwrap.dedent(""" [win.wid for win in self.core.mapped_windows] """)) assert success return eval(result)
Add Wayland Backend.fake_click and Backend.get_all_windows methods
Add Wayland Backend.fake_click and Backend.get_all_windows methods These work by eval-ing in the test Qtile instance. It might be nicer to instead make these cmd_s on the `Core` if/when we expose cmd_ methods from the Core.
Python
mit
ramnes/qtile,ramnes/qtile,qtile/qtile,qtile/qtile
import contextlib import os + import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" - raise NotImplementedError + self.manager.c.eval(textwrap.dedent(""" + self.core._focus_by_click() + self.core._process_cursor_button(1, True) + """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" - raise NotImplementedError + success, result = self.manager.c.eval(textwrap.dedent(""" + [win.wid for win in self.core.mapped_windows] + """)) + assert success + return eval(result)
Add Wayland Backend.fake_click and Backend.get_all_windows methods
## Code Before: import contextlib import os from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" raise NotImplementedError def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" raise NotImplementedError ## Instruction: Add Wayland Backend.fake_click and Backend.get_all_windows methods ## Code After: import contextlib import os import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" self.manager.c.eval(textwrap.dedent(""" self.core._focus_by_click() self.core._process_cursor_button(1, True) """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" success, result = self.manager.c.eval(textwrap.dedent(""" [win.wid for win in self.core.mapped_windows] """)) assert success return eval(result)
import contextlib import os + import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" - raise NotImplementedError + self.manager.c.eval(textwrap.dedent(""" + self.core._focus_by_click() + self.core._process_cursor_button(1, True) + """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" - raise NotImplementedError + success, result = self.manager.c.eval(textwrap.dedent(""" + [win.wid for win in self.core.mapped_windows] + """)) + assert success + return eval(result)
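A rough, self-contained sketch of the round trip the new get_all_windows relies on: the remote eval hands its result back as a string, so the caller rebuilds the Python object with eval(). `FakeClient` and the window ids below are invented for illustration and are not the real qtile client API.

class FakeClient:
    """Stands in for manager.c: eval() returns (success, repr_of_result)."""

    def eval(self, code):
        result = [101, 102, 103]       # pretend these are mapped window ids
        return True, repr(result)


client = FakeClient()
success, result = client.eval('[win.wid for win in self.core.mapped_windows]')
assert success
window_ids = eval(result)
print(window_ids)                      # [101, 102, 103]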
1e60c603321729c71895ac5dc19adc669cce4a72
tests/udev_test.py
tests/udev_test.py
import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev blivet.udev.util = mock.Mock() blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev blivet.udev.util = mock.Mock() blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main()
import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev self._blivet_os = blivet.udev.os self._blivet_log = blivet.udev.log self._blivet_util = blivet.udev.util blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() blivet.udev.util = mock.Mock() def tearDown(self): import blivet.udev blivet.udev.log = self._blivet_log blivet.udev.os = self._blivet_os blivet.udev.util = self._blivet_util def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main()
Clean up mocking done by udev tests when finished.
Clean up mocking done by udev tests when finished.
Python
lgpl-2.1
dwlehman/blivet,rvykydal/blivet,AdamWill/blivet,rhinstaller/blivet,vpodzime/blivet,AdamWill/blivet,vojtechtrefny/blivet,vojtechtrefny/blivet,vpodzime/blivet,rvykydal/blivet,rhinstaller/blivet,dwlehman/blivet,jkonecny12/blivet,jkonecny12/blivet
import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev + self._blivet_os = blivet.udev.os + self._blivet_log = blivet.udev.log + self._blivet_util = blivet.udev.util blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() + blivet.udev.util = mock.Mock() + + def tearDown(self): + import blivet.udev + blivet.udev.log = self._blivet_log + blivet.udev.os = self._blivet_os + blivet.udev.util = self._blivet_util def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev - blivet.udev.util = mock.Mock() blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev - blivet.udev.util = mock.Mock() blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main()
Clean up mocking done by udev tests when finished.
## Code Before: import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev blivet.udev.util = mock.Mock() blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev blivet.udev.util = mock.Mock() blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main() ## Instruction: Clean up mocking done by udev tests when finished. ## Code After: import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev self._blivet_os = blivet.udev.os self._blivet_log = blivet.udev.log self._blivet_util = blivet.udev.util blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() blivet.udev.util = mock.Mock() def tearDown(self): import blivet.udev blivet.udev.log = self._blivet_log blivet.udev.os = self._blivet_os blivet.udev.util = self._blivet_util def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main()
import unittest import mock class UdevTest(unittest.TestCase): def setUp(self): import blivet.udev + self._blivet_os = blivet.udev.os + self._blivet_log = blivet.udev.log + self._blivet_util = blivet.udev.util blivet.udev.os = mock.Mock() blivet.udev.log = mock.Mock() + blivet.udev.util = mock.Mock() + + def tearDown(self): + import blivet.udev + blivet.udev.log = self._blivet_log + blivet.udev.os = self._blivet_os + blivet.udev.util = self._blivet_util def test_udev_get_device(self): import blivet.udev devices = blivet.udev.global_udev.list_devices(subsystem="block") for device in devices: self.assertNotEqual(blivet.udev.get_device(device.sys_path), None) def udev_settle_test(self): import blivet.udev - blivet.udev.util = mock.Mock() blivet.udev.settle() self.assertTrue(blivet.udev.util.run_program.called) def udev_trigger_test(self): import blivet.udev - blivet.udev.util = mock.Mock() blivet.udev.trigger() self.assertTrue(blivet.udev.util.run_program.called) if __name__ == "__main__": unittest.main()
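A self-contained sketch of the save-and-restore pattern the commit above introduces, using the standard library's unittest.mock and a stand-in patch target (os.getcwd) so it runs without blivet installed.

import os
import unittest
from unittest import mock


class SaveAndRestoreExample(unittest.TestCase):
    def setUp(self):
        self._real_getcwd = os.getcwd                 # remember the original
        os.getcwd = mock.Mock(return_value='/tmp')    # patch it for the test

    def tearDown(self):
        os.getcwd = self._real_getcwd                 # put it back when finished

    def test_patched_call(self):
        self.assertEqual(os.getcwd(), '/tmp')
        self.assertTrue(os.getcwd.called)


if __name__ == '__main__':
    unittest.main()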
572a84ae4fe7ce464fe66b6462a80b09b20f8f1c
fireplace/cards/gvg/neutral_epic.py
fireplace/cards/gvg/neutral_epic.py
from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")]
from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")] # Piloted Sky Golem class GVG_105: def deathrattle(self): return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))] # Junkbot class GVG_106: def OWN_MINION_DESTROY(self, minion): if minion.race == Race.MECHANICAL: return [Buff(SELF, "GVG_106e")] # Enhance-o Mechano class GVG_107: def action(self): for target in self.controller.field: tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD)) yield SetTag(target, {tag: True})
Implement Piloted Sky Golem, Junkbot and Enhance-o Mechano
Implement Piloted Sky Golem, Junkbot and Enhance-o Mechano
Python
agpl-3.0
NightKev/fireplace,Ragowit/fireplace,liujimj/fireplace,Meerkov/fireplace,smallnamespace/fireplace,amw2104/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,butozerca/fireplace,Meerkov/fireplace,jleclanche/fireplace,amw2104/fireplace,butozerca/fireplace,Ragowit/fireplace,liujimj/fireplace
from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")] + + # Piloted Sky Golem + class GVG_105: + def deathrattle(self): + return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))] + + + # Junkbot + class GVG_106: + def OWN_MINION_DESTROY(self, minion): + if minion.race == Race.MECHANICAL: + return [Buff(SELF, "GVG_106e")] + + + # Enhance-o Mechano + class GVG_107: + def action(self): + for target in self.controller.field: + tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD)) + yield SetTag(target, {tag: True}) +
Implement Piloted Sky Golem, Junkbot and Enhance-o Mechano
## Code Before: from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")] ## Instruction: Implement Piloted Sky Golem, Junkbot and Enhance-o Mechano ## Code After: from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")] # Piloted Sky Golem class GVG_105: def deathrattle(self): return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))] # Junkbot class GVG_106: def OWN_MINION_DESTROY(self, minion): if minion.race == Race.MECHANICAL: return [Buff(SELF, "GVG_106e")] # Enhance-o Mechano class GVG_107: def action(self): for target in self.controller.field: tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD)) yield SetTag(target, {tag: True})
from ..utils import * ## # Minions # Hobgoblin class GVG_104: def OWN_CARD_PLAYED(self, card): if card.type == CardType.MINION and card.atk == 1: return [Buff(card, "GVG_104a")] + + + # Piloted Sky Golem + class GVG_105: + def deathrattle(self): + return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))] + + + # Junkbot + class GVG_106: + def OWN_MINION_DESTROY(self, minion): + if minion.race == Race.MECHANICAL: + return [Buff(SELF, "GVG_106e")] + + + # Enhance-o Mechano + class GVG_107: + def action(self): + for target in self.controller.field: + tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD)) + yield SetTag(target, {tag: True})
6263c544a5f8e09f1e3c2ee761af70f71acd0c79
webapp/tests/__init__.py
webapp/tests/__init__.py
from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): brand = Brand(id='acme', title='ACME') db.session.add(brand) party = Party(id='acme-2014', brand=brand, title='ACME 2014') db.session.add(party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): self.brand = Brand(id='acme', title='ACME') db.session.add(self.brand) self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014') db.session.add(self.party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
Make brand and party available to tests.
Make brand and party available to tests.
Python
bsd-3-clause
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): - brand = Brand(id='acme', title='ACME') + self.brand = Brand(id='acme', title='ACME') - db.session.add(brand) + db.session.add(self.brand) + - party = Party(id='acme-2014', brand=brand, title='ACME 2014') + self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014') - db.session.add(party) + db.session.add(self.party) + db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
Make brand and party available to tests.
## Code Before: from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): brand = Brand(id='acme', title='ACME') db.session.add(brand) party = Party(id='acme-2014', brand=brand, title='ACME 2014') db.session.add(party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all() ## Instruction: Make brand and party available to tests. ## Code After: from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): self.brand = Brand(id='acme', title='ACME') db.session.add(self.brand) self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014') db.session.add(self.party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): - brand = Brand(id='acme', title='ACME') + self.brand = Brand(id='acme', title='ACME') ? +++++ - db.session.add(brand) + db.session.add(self.brand) ? +++++ + - party = Party(id='acme-2014', brand=brand, title='ACME 2014') + self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014') ? +++++ +++++ - db.session.add(party) + db.session.add(self.party) ? +++++ + db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
7654d9dcebb0ad1e862e376b5b694234173289ed
twitter_helper/util.py
twitter_helper/util.py
import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line
import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline #Be polite, put things back in the place you found them afile.seek(0) return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line
Reset pointer to the beginning of the file once it has been read
Reset pointer to the beginning of the file once it has been read
Be polite, put things back in the place you found them
Python
mit
kuzeko/Twitter-Importer,kuzeko/Twitter-Importer
import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline + #Be polite, put things back in the place you found them + afile.seek(0) return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line
Reset pointer to the beginning of the file once it has been read
## Code Before: import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line ## Instruction: Reset pointer to the beginning of file once read it ## Code After: import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline #Be polite, put things back in the place you found them afile.seek(0) return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line
import random def random_line(afile, max_chars = 123, min_chars = 5): line = next(afile) for num, aline in enumerate(afile): aline = aline.strip() if (len(aline) < min_chars or aline[0].islower() or len(aline) > max_chars) or random.randrange(num + 2): continue line = aline + #Be polite, put things back in the place you found them + afile.seek(0) return line def prepare_quote(text_file, signature=" -- Hamlet", max_chars = 123, min_chars = 5,): line = random_line(text_file, max_chars, min_chars) number = random.randrange(1,1000,2) line = "{0}] " + line + signature line = line.format(number) return line
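A minimal, runnable demonstration (io.StringIO standing in for a real file) of why the added afile.seek(0) matters: without resetting the read position, a second scan over the same file object sees nothing.

import io


def last_line(afile, reset=True):
    line = None
    for aline in afile:
        line = aline.strip()
    if reset:
        afile.seek(0)              # put things back where we found them
    return line


polite = io.StringIO('first\nsecond\nthird\n')
print(last_line(polite))               # third
print(last_line(polite))               # third again, the pointer was reset

rude = io.StringIO('first\nsecond\nthird\n')
print(last_line(rude, reset=False))    # third
print(last_line(rude, reset=False))    # None, the file object is exhausted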
429738972be911f6b05358c918f822270eb94da7
botbot/checks.py
botbot/checks.py
"""Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT'
"""Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' elif ext == '.bam': if os.path.isfile('.'.join((name, 'sam'))): return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT'
Fix SAM checker for better coverage
Fix SAM checker for better coverage
Python
mit
jackstanek/BotBot,jackstanek/BotBot
"""Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' + elif ext == '.bam': + if os.path.isfile('.'.join((name, 'sam'))): + return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT'
Fix SAM checker for better coverage
## Code Before: """Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT' ## Instruction: Fix SAM checker to for better coverage ## Code After: """Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' elif ext == '.bam': if os.path.isfile('.'.join((name, 'sam'))): return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT'
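An illustrative re-implementation of the extended check, trimmed so it runs without the botbot package: the directory listing is passed in as a plain set instead of touching the filesystem, which is an assumption made only for this sketch.

import os


def sam_should_compress(path, existing_files):
    name, ext = os.path.splitext(path)
    if ext == '.sam':
        if name + '.bam' in existing_files:
            return 'PROB_SAM_AND_BAM_EXIST'
        return 'PROB_SAM_SHOULD_COMPRESS'
    if ext == '.bam' and name + '.sam' in existing_files:
        return 'PROB_SAM_SHOULD_COMPRESS'
    return None


files = {'run1.sam', 'run1.bam', 'run2.sam', 'run3.bam'}
for path in sorted(files):
    print(path, sam_should_compress(path, files))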
"""Functions for checking files""" import os import stat import mimetypes from .checker import is_link from .config import CONFIG def is_fastq(fi): """Check whether a given file is a fastq file.""" path = fi['path'] if os.path.splitext(path)[1] == ".fastq": if not is_link(path): return 'PROB_FILE_IS_FASTQ' def sam_should_compress(fi): """Check if a *.SAM file should be compressed or deleted""" path = fi['path'] name, ext = os.path.splitext(path) if ext == '.sam': if os.path.isfile('.'.join((name, 'bam'))): return 'PROB_SAM_AND_BAM_EXIST' else: return 'PROB_SAM_SHOULD_COMPRESS' + elif ext == '.bam': + if os.path.isfile('.'.join((name, 'sam'))): + return 'PROB_SAM_SHOULD_COMPRESS' def is_large_plaintext(fi): """Detect if a file plaintext and >100MB""" guess = mimetypes.guess_type(fi['path']) mod_days = fi['lastmod'] / (24 * 60 * 60) # Days since last modification large = CONFIG.get('checks', 'largesize', fallback=100000000) # Default to 100MB old = CONFIG.get('checks', 'oldage', fallback=30) # Default to one month if guess == 'text/plain' and fi['size'] > large and mod_days >= old: return 'PROB_OLD_LARGE_PLAINTEXT'
b5e11827929f37da8d18616f1fb3fc2d62591515
djangocms_spa/decorators.py
djangocms_spa/decorators.py
from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): cache_key = view.request.get_full_path() cached_response = cache.get(cache_key) if cached_response and not view.request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) if response.status_code == 200 and not view.request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout)
from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): request = view.request language_code = request.LANGUAGE_CODE cache_key = '{path}:{lang}'.format(path=request.get_full_path(), lang=language_code) cached_response = cache.get(cache_key) if cached_response and not request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) if response.status_code == 200 and not request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout)
Add language code to cache key explicitly
[language_activation] Add language code to cache key explicitly
Python
mit
dreipol/djangocms-spa,dreipol/djangocms-spa
from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): - cache_key = view.request.get_full_path() + request = view.request + language_code = request.LANGUAGE_CODE + cache_key = '{path}:{lang}'.format(path=request.get_full_path(), lang=language_code) cached_response = cache.get(cache_key) - if cached_response and not view.request.user.is_authenticated(): + if cached_response and not request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) - if response.status_code == 200 and not view.request.user.is_authenticated(): + if response.status_code == 200 and not request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout)
Add language code to cache key explicitly
## Code Before: from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): cache_key = view.request.get_full_path() cached_response = cache.get(cache_key) if cached_response and not view.request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) if response.status_code == 200 and not view.request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout) ## Instruction: Add language code to cache key explicitly ## Code After: from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): request = view.request language_code = request.LANGUAGE_CODE cache_key = '{path}:{lang}'.format(path=request.get_full_path(), lang=language_code) cached_response = cache.get(cache_key) if cached_response and not request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) if response.status_code == 200 and not request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout)
from functools import wraps from django.conf import settings from django.core.cache import cache from django.template.response import ContentNotRenderedError from django.utils.decorators import available_attrs def cache_view(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view_func(view, *args, **kwargs): - cache_key = view.request.get_full_path() + request = view.request + language_code = request.LANGUAGE_CODE + cache_key = '{path}:{lang}'.format(path=request.get_full_path(), lang=language_code) cached_response = cache.get(cache_key) - if cached_response and not view.request.user.is_authenticated(): ? ----- + if cached_response and not request.user.is_authenticated(): return cached_response response = view_func(view, *args, **kwargs) - if response.status_code == 200 and not view.request.user.is_authenticated(): ? ----- + if response.status_code == 200 and not request.user.is_authenticated(): try: set_cache_after_rendering(cache_key, response, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) except ContentNotRenderedError: response.add_post_render_callback( lambda r: set_cache_after_rendering(cache_key, r, settings.DJANGOCMS_SPA_CACHE_TIMEOUT) ) return response return _wrapped_view_func def set_cache_after_rendering(cache_key, response, timeout): cache.set(cache_key, response, timeout)
19e26d09659dc4db6bcd27565dacd458b7e3e4cd
symposion/proposals/management/commands/ensure_proposal_records.py
symposion/proposals/management/commands/ensure_proposal_records.py
from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): def handle_noargs(self, **options): from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records()
from django.core.management.base import BaseCommand class Command(BaseCommand): def handle(self, *args, **options): from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records()
Use BaseCommand instead of NoArgsCommand
Use BaseCommand instead of NoArgsCommand
Python
bsd-3-clause
PyCon/pycon,njl/pycon,njl/pycon,njl/pycon,PyCon/pycon,njl/pycon,PyCon/pycon,PyCon/pycon
- from django.core.management.base import NoArgsCommand + from django.core.management.base import BaseCommand - class Command(NoArgsCommand): + class Command(BaseCommand): - def handle_noargs(self, **options): + def handle(self, *args, **options): from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records()
Use BaseCommand instead of NoArgsCommand
## Code Before: from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): def handle_noargs(self, **options): from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records() ## Instruction: Use BaseCommand instead of NoArgsCommand ## Code After: from django.core.management.base import BaseCommand class Command(BaseCommand): def handle(self, *args, **options): from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records()
- from django.core.management.base import NoArgsCommand ? ^^^^^ + from django.core.management.base import BaseCommand ? ^^ + - class Command(NoArgsCommand): ? ^^^^^ + class Command(BaseCommand): ? ^^ + - def handle_noargs(self, **options): ? ------- + def handle(self, *args, **options): ? +++++++ from symposion.proposals.kinds import ensure_proposal_records ensure_proposal_records()
44520918dc0fad40f3afcfc2cdfde6f3208543cd
garden_lighting/MCP23017/raspberry.py
garden_lighting/MCP23017/raspberry.py
import time import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode # We don't need performance, don't want root and don't want to interfere with # other wiringpi instances -> sysfspy wiringpi.wiringPiSetupSys() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
import time import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' # Set pin numbering mode # wiringPiSetupSys() did not work because pins were low after booting and running the write commands # This requires root! wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
Use wiringPiSetupGpio, which requires root. With wiringPiSetupSys some GPIOs stayed low after boot.
Use wiringPiSetupGpio, which requires root. With wiringPiSetupSys some GPIOs stayed low after boot.
Python
mit
ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting
import time - import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' - os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode - # We don't need performance, don't want root and don't want to interfere with - # other wiringpi instances -> sysfspy + # wiringPiSetupSys() did not work because pins were low after booting and running the write commands + # This requires root! - wiringpi.wiringPiSetupSys() + wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
Use wiringPiSetupGpio, which requires root. With wiringPiSetupSys some GPIOs stayed low after boot.
## Code Before: import time import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode # We don't need performance, don't want root and don't want to interfere with # other wiringpi instances -> sysfspy wiringpi.wiringPiSetupSys() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH) ## Instruction: Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot. ## Code After: import time import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' # Set pin numbering mode # wiringPiSetupSys() did not work because pins were low after booting and running the write commands # This requires root! wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
import time - import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' - os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode - # We don't need performance, don't want root and don't want to interfere with - # other wiringpi instances -> sysfspy + # wiringPiSetupSys() did not work because pins were low after booting and running the write commands + # This requires root! - wiringpi.wiringPiSetupSys() ? ^^^ + wiringpi.wiringPiSetupGpio() ? ^^^^ # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
2be2b71dbd3aba4d7aee2c54102eeac45252c5ed
drftutorial/catalog/views.py
drftutorial/catalog/views.py
from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data)
from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView from rest_framework import status from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data) def post(self, request, format=None): serializer = ProductSerializer(data=request.data) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Add POST method to ProductList class
Add POST method to ProductList class
Python
mit
andreagrandi/drf-tutorial
from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView + from rest_framework import status from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data) + def post(self, request, format=None): + serializer = ProductSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) +
Add POST method to ProductList class
## Code Before: from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data) ## Instruction: Add POST method to ProductList class ## Code After: from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView from rest_framework import status from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data) def post(self, request, format=None): serializer = ProductSerializer(data=request.data) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
from django.http import HttpResponse from rest_framework.response import Response from rest_framework.views import APIView + from rest_framework import status from .models import Product from .serializers import ProductSerializer class ProductList(APIView): def get(self, request, format=None): products = Product.objects.all() serializer = ProductSerializer(products, many=True) return Response(serializer.data) + + def post(self, request, format=None): + serializer = ProductSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
ac9ee17f93b90a79b629d222d8c2846debed6f04
chalice/__init__.py
chalice/__init__.py
from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig ) __version__ = '0.8.0'
from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig, CustomAuthorizer, CognitoUserPoolAuthorizer ) __version__ = '0.8.0'
Add authorizers as top level import
Add authorizers as top level import
Python
apache-2.0
awslabs/chalice
from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, - NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig + NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig, + CustomAuthorizer, CognitoUserPoolAuthorizer ) __version__ = '0.8.0'
Add authorizers as top level import
## Code Before: from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig ) __version__ = '0.8.0' ## Instruction: Add authorizers as top level import ## Code After: from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig, CustomAuthorizer, CognitoUserPoolAuthorizer ) __version__ = '0.8.0'
from chalice.app import Chalice from chalice.app import ( ChaliceViewError, BadRequestError, UnauthorizedError, ForbiddenError, - NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig + NotFoundError, ConflictError, TooManyRequestsError, Response, CORSConfig, ? + + CustomAuthorizer, CognitoUserPoolAuthorizer ) __version__ = '0.8.0'
0b741c89ea19759f25526256ee039707cb423cef
aldryn_faq/tests/test_menu.py
aldryn_faq/tests/test_menu.py
from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu from django.utils.translation import ( get_language_from_request, ) from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): # Test that the EN version of the menu has only category1 and is shown # in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], [category1.name] ) # Test that the DE version has 2 categories and that they are shown in # German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'de') category2 = self.reload(self.category2, 'de') nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], [category1.name, category2.name] )
from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): # Test that the EN version of the menu has only category1 and its # question1, and is shown in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') question1 = self.reload(self.question1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], [category1.name, question1.title] ) # Test that the DE version has 2 categories and their questions that # they are shown in German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], [self.category1.name, self.category2.name, self.question1.title, self.question2.title] )
Fix tests to include the questions, which are now in the menu
Fix tests to include the questions, which are now in the menu
Python
bsd-3-clause
czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq
from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu + - from django.utils.translation import ( - get_language_from_request, - ) from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): - # Test that the EN version of the menu has only category1 and is shown + # Test that the EN version of the menu has only category1 and its - # in English. + # question1, and is shown in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') + question1 = self.reload(self.question1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], - [category1.name] + [category1.name, question1.title] ) - # Test that the DE version has 2 categories and that they are shown in + # Test that the DE version has 2 categories and their questions that - # German. + # they are shown in German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() - category1 = self.reload(self.category1, 'de') - category2 = self.reload(self.category2, 'de') nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], - [category1.name, category2.name] + [self.category1.name, self.category2.name, self.question1.title, + self.question2.title] )
Fix tests to include the questions, which are now in the menu
## Code Before: from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu from django.utils.translation import ( get_language_from_request, ) from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): # Test that the EN version of the menu has only category1 and is shown # in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], [category1.name] ) # Test that the DE version has 2 categories and that they are shown in # German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'de') category2 = self.reload(self.category2, 'de') nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], [category1.name, category2.name] ) ## Instruction: Fix tests to now include the questions, which are now in the menu ## Code After: from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): # Test that the EN version of the menu has only category1 and its # question1, and is shown in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') question1 = self.reload(self.question1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], [category1.name, question1.title] ) # Test that the DE version has 2 categories and their questions that # they are shown in German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], [self.category1.name, self.category2.name, self.question1.title, self.question2.title] )
from __future__ import unicode_literals from aldryn_faq.menu import FaqCategoryMenu + - from django.utils.translation import ( - get_language_from_request, - ) from .test_base import AldrynFaqTest, CMSRequestBasedTest class TestMenu(AldrynFaqTest, CMSRequestBasedTest): def test_get_nodes(self): - # Test that the EN version of the menu has only category1 and is shown ? ------ + # Test that the EN version of the menu has only category1 and its ? + - # in English. + # question1, and is shown in English. request = self.get_page_request(None, self.user, '/en/') menu = FaqCategoryMenu() category1 = self.reload(self.category1, 'en') + question1 = self.reload(self.question1, 'en') self.assertEqualItems( [menuitem.title for menuitem in menu.get_nodes(request)], - [category1.name] + [category1.name, question1.title] ? +++++++++++++++++ ) - # Test that the DE version has 2 categories and that they are shown in ? ^ --- ^^^^^^^^^^^ + # Test that the DE version has 2 categories and their questions that ? ^^^^^^^^ ++++ ^ - # German. + # they are shown in German. request = self.get_page_request(None, self.user, '/de/') menu = FaqCategoryMenu() - category1 = self.reload(self.category1, 'de') - category2 = self.reload(self.category2, 'de') nodes = menu.get_nodes(request) self.assertEqualItems( [menuitem.title for menuitem in nodes], - [category1.name, category2.name] + [self.category1.name, self.category2.name, self.question1.title, + self.question2.title] )
938725a3693ee885a761e5ba07e75d2b94d78661
pytask/profile/urls.py
pytask/profile/urls.py
from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), url(r'^notf/browse/$', 'browse_notifications', name='edit_profile'), url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification', name='view_notification'), url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification', name='delete_notification'), url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), )
from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), url(r'^notification/browse/$', 'browse_notifications', name='browse_notifications'), url(r'^notification/view/(?P<notification_id>\d+)$', 'view_notification', name='view_notification'), url(r'^notification/delete/(?P<notification_id>\d+)$', 'delete_notification', name='delete_notification'), url(r'^notification/unread/(?P<notification_id>\d+)$', 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), )
Fix styling issue in URLConf.
Fix styling issue in URLConf.
Python
agpl-3.0
madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask
from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), - url(r'^notf/browse/$', 'browse_notifications', + url(r'^notification/browse/$', 'browse_notifications', - name='edit_profile'), - url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification', - name='view_notification'), - url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification', - name='delete_notification'), - url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification', - name='unread_notification'), + name='browse_notifications'), + url(r'^notification/view/(?P<notification_id>\d+)$', + 'view_notification', name='view_notification'), + url(r'^notification/delete/(?P<notification_id>\d+)$', + 'delete_notification', name='delete_notification'), + url(r'^notification/unread/(?P<notification_id>\d+)$', + 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), )
Fix styling issue in URLConf.
## Code Before: from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), url(r'^notf/browse/$', 'browse_notifications', name='edit_profile'), url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification', name='view_notification'), url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification', name='delete_notification'), url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), ) ## Instruction: Fix styling issue in URLConf. ## Code After: from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), url(r'^notification/browse/$', 'browse_notifications', name='browse_notifications'), url(r'^notification/view/(?P<notification_id>\d+)$', 'view_notification', name='view_notification'), url(r'^notification/delete/(?P<notification_id>\d+)$', 'delete_notification', name='delete_notification'), url(r'^notification/unread/(?P<notification_id>\d+)$', 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), )
from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url urlpatterns = patterns('pytask.profile.views', url(r'^view/$', 'view_profile', name='view_profile'), url(r'^edit/$', 'edit_profile', name='edit_profile'), - url(r'^notf/browse/$', 'browse_notifications', + url(r'^notification/browse/$', 'browse_notifications', ? + +++++++ - name='edit_profile'), - url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification', - name='view_notification'), - url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification', - name='delete_notification'), - url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification', - name='unread_notification'), ? ^^ -- + name='browse_notifications'), ? ^ +++ + + url(r'^notification/view/(?P<notification_id>\d+)$', + 'view_notification', name='view_notification'), + url(r'^notification/delete/(?P<notification_id>\d+)$', + 'delete_notification', name='delete_notification'), + url(r'^notification/unread/(?P<notification_id>\d+)$', + 'unread_notification', name='unread_notification'), url(r'^user/view/(?P<user_id>\d+)$', 'view_user', name='view_user'), )
f38b117316039042f3c00c73bbb7ceaeb0f2e6e1
src/python/pants/core_tasks/noop.py
src/python/pants/core_tasks/noop.py
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling.""" @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling.""" @classmethod def product_types(cls): return ['ran_tests']
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling. :API: public """ @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling. :API: public """ @classmethod def product_types(cls): return ['ran_tests']
Add public api markers for core_tasks
Add public api markers for core_tasks The following modules were reviewed and all api's were left as private. As far as I can tell these modules are not currently used by plugins. * pants.core_tasks.bash_completion.py * pants.core_tasks.changed_target_tasks.py * pants.core_tasks.clean.py * pants.core_tasks.deferred_sources_mapper.py * pants.core_tasks.explain_options_task.py * pants.core_tasks.invalidate.py * pants.core_tasks.list_goals.py * pants.core_tasks.pantsd_kill.py * pants.core_tasks.register.py * pants.core_tasks.reporting_server_kill.py * pants.core_tasks.reporting_server_run.py * pants.core_tasks.roots.py * pants.core_tasks.run_prep_command.py * pants.core_tasks.targets_help.py * pants.core_tasks.what_changed.py Testing Done: CI green: https://travis-ci.org/pantsbuild/pants/builds/111549646 Bugs closed: 2710, 2955 Reviewed at https://rbcommons.com/s/twitter/r/3490/
Python
apache-2.0
manasapte/pants,twitter/pants,fkorotkov/pants,jsirois/pants,pantsbuild/pants,peiyuwang/pants,pombredanne/pants,cevaris/pants,fkorotkov/pants,mateor/pants,baroquebobcat/pants,gmalmquist/pants,peiyuwang/pants,fkorotkov/pants,wisechengyi/pants,fkorotkov/pants,UnrememberMe/pants,wisechengyi/pants,ericzundel/pants,ericzundel/pants,landism/pants,ericzundel/pants,cevaris/pants,tdyas/pants,pombredanne/pants,jsirois/pants,gmalmquist/pants,kwlzn/pants,baroquebobcat/pants,gmalmquist/pants,15Dkatz/pants,manasapte/pants,peiyuwang/pants,peiyuwang/pants,twitter/pants,ericzundel/pants,pombredanne/pants,kwlzn/pants,baroquebobcat/pants,jsirois/pants,baroquebobcat/pants,twitter/pants,pombredanne/pants,foursquare/pants,baroquebobcat/pants,lahosken/pants,mateor/pants,tdyas/pants,lahosken/pants,UnrememberMe/pants,fkorotkov/pants,manasapte/pants,foursquare/pants,twitter/pants,lahosken/pants,benjyw/pants,lahosken/pants,wisechengyi/pants,ity/pants,pombredanne/pants,foursquare/pants,ity/pants,mateor/pants,landism/pants,fkorotkov/pants,gmalmquist/pants,ericzundel/pants,wisechengyi/pants,lahosken/pants,mateor/pants,UnrememberMe/pants,15Dkatz/pants,15Dkatz/pants,kwlzn/pants,foursquare/pants,dbentley/pants,dbentley/pants,wisechengyi/pants,pantsbuild/pants,tdyas/pants,benjyw/pants,lahosken/pants,wisechengyi/pants,landism/pants,cevaris/pants,pantsbuild/pants,gmalmquist/pants,UnrememberMe/pants,cevaris/pants,lahosken/pants,benjyw/pants,tdyas/pants,pombredanne/pants,ericzundel/pants,tdyas/pants,baroquebobcat/pants,dbentley/pants,UnrememberMe/pants,benjyw/pants,mateor/pants,UnrememberMe/pants,benjyw/pants,baroquebobcat/pants,ity/pants,foursquare/pants,pantsbuild/pants,manasapte/pants,landism/pants,kwlzn/pants,landism/pants,pantsbuild/pants,landism/pants,twitter/pants,ity/pants,wisechengyi/pants,15Dkatz/pants,peiyuwang/pants,kwlzn/pants,gmalmquist/pants,twitter/pants,cevaris/pants,foursquare/pants,dbentley/pants,twitter/pants,ity/pants,benjyw/pants,mateor/pants,foursquare/pants,manasapte/pants,pantsbuild/pants,cevaris/pants,dbentley/pants,cevaris/pants,fkorotkov/pants,lahosken/pants,mateor/pants,ity/pants,UnrememberMe/pants,15Dkatz/pants,ericzundel/pants,pantsbuild/pants,twitter/pants,UnrememberMe/pants,peiyuwang/pants,dbentley/pants,gmalmquist/pants,ericzundel/pants,landism/pants,peiyuwang/pants,manasapte/pants,tdyas/pants,foursquare/pants,wisechengyi/pants,tdyas/pants,dbentley/pants,landism/pants,benjyw/pants,manasapte/pants,mateor/pants,15Dkatz/pants,ity/pants,peiyuwang/pants,tdyas/pants,UnrememberMe/pants,wisechengyi/pants,baroquebobcat/pants,twitter/pants,fkorotkov/pants,kwlzn/pants,15Dkatz/pants,15Dkatz/pants,baroquebobcat/pants,foursquare/pants,kwlzn/pants,tdyas/pants,pombredanne/pants
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): - """A no-op that provides a product type that can be used to force scheduling.""" + """A no-op that provides a product type that can be used to force scheduling. + + :API: public + """ @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): - """A no-op that provides a product type that can be used to force scheduling.""" + """A no-op that provides a product type that can be used to force scheduling. + + :API: public + """ @classmethod def product_types(cls): return ['ran_tests']
Add public api markers for core_tasks
## Code Before: from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling.""" @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling.""" @classmethod def product_types(cls): return ['ran_tests'] ## Instruction: Add public api markers for core_tasks ## Code After: from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling. :API: public """ @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): """A no-op that provides a product type that can be used to force scheduling. :API: public """ @classmethod def product_types(cls): return ['ran_tests']
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.task.noop_exec_task import NoopExecTask class NoopCompile(NoopExecTask): - """A no-op that provides a product type that can be used to force scheduling.""" ? --- + """A no-op that provides a product type that can be used to force scheduling. + + :API: public + """ @classmethod def product_types(cls): return ['ran_compile'] class NoopTest(NoopExecTask): - """A no-op that provides a product type that can be used to force scheduling.""" ? --- + """A no-op that provides a product type that can be used to force scheduling. + + :API: public + """ @classmethod def product_types(cls): return ['ran_tests']
b556bffeb5ed48812258b452e05cc00cfb160453
girder/app/app/configuration.py
girder/app/app/configuration.py
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): return { 'features': { 'notebooks': Setting().get(Features.NOTEBOOKS, True) }, 'deployment': { 'site': Setting().get(Deployment.SITE, '') }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): notebooks = Setting().get(Features.NOTEBOOKS) if notebooks is None: notebooks = True site = Setting().get(Deployment.SITE) if site is None: site = '' return { 'features': { 'notebooks': notebooks }, 'deployment': { 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
Fix up settings for upstream Girder change
Fix up settings for upstream Girder change
Python
bsd-3-clause
OpenChemistry/mongochemserver
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): + + notebooks = Setting().get(Features.NOTEBOOKS) + if notebooks is None: + notebooks = True + + site = Setting().get(Deployment.SITE) + if site is None: + site = '' + return { 'features': { - 'notebooks': Setting().get(Features.NOTEBOOKS, True) + 'notebooks': notebooks }, 'deployment': { - 'site': Setting().get(Deployment.SITE, '') + 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
Fix up settings for upstream Girder change
## Code Before: from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): return { 'features': { 'notebooks': Setting().get(Features.NOTEBOOKS, True) }, 'deployment': { 'site': Setting().get(Deployment.SITE, '') }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } } ## Instruction: Fix up settings for upstream Girder change ## Code After: from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): notebooks = Setting().get(Features.NOTEBOOKS) if notebooks is None: notebooks = True site = Setting().get(Deployment.SITE) if site is None: site = '' return { 'features': { 'notebooks': notebooks }, 'deployment': { 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): + + notebooks = Setting().get(Features.NOTEBOOKS) + if notebooks is None: + notebooks = True + + site = Setting().get(Deployment.SITE) + if site is None: + site = '' + return { 'features': { - 'notebooks': Setting().get(Features.NOTEBOOKS, True) + 'notebooks': notebooks }, 'deployment': { - 'site': Setting().get(Deployment.SITE, '') + 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
09f649ac0b14269067c43df9f879d963ab99cdac
backend/breach/views.py
backend/breach/views.py
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return HttpResponse(json.dumps(new_work), content_type='application/json') @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return JsonResponse(new_work) @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
Fix response with json for get_work
Fix response with json for get_work
Python
mit
dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() - return HttpResponse(json.dumps(new_work), content_type='application/json') + return JsonResponse(new_work) + @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
Fix response with json for get_work
## Code Before: import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return HttpResponse(json.dumps(new_work), content_type='application/json') @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory }) ## Instruction: Fix response with json for get_work ## Code After: import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return JsonResponse(new_work) @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() - return HttpResponse(json.dumps(new_work), content_type='application/json') + return JsonResponse(new_work) + @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
09f38a9f13521c69be2e047628c74ed571f0d8de
ideascube/wsgi.py
ideascube/wsgi.py
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1') from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") from django.core.wsgi import get_wsgi_application application = get_wsgi_application() from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1')
Load the application before running commands
Load the application before running commands
Python
agpl-3.0
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") + + from django.core.wsgi import get_wsgi_application + application = get_wsgi_application() from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1') - from django.core.wsgi import get_wsgi_application - application = get_wsgi_application()
Load the application before running commands
## Code Before: import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1') from django.core.wsgi import get_wsgi_application application = get_wsgi_application() ## Instruction: Load the application before running commands ## Code After: import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") from django.core.wsgi import get_wsgi_application application = get_wsgi_application() from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1')
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings") os.environ.setdefault("STORAGE_ROOT", "/var/ideascube") + + from django.core.wsgi import get_wsgi_application + application = get_wsgi_application() from django.core.management import call_command call_command('migrate', '--noinput', '--verbosity=1', '--database=default') call_command('migrate', '--noinput', '--verbosity=1', '--database=transient') call_command('collectstatic', '--noinput', '--verbosity=1') - from django.core.wsgi import get_wsgi_application - application = get_wsgi_application()
4b0b85a54208625c7a2753d8ba9b96818f1411d0
denorm/__init__.py
denorm/__init__.py
from denorm.fields import denormalized from denorm.dependencies import depend_on_related,depend_on_q
from denorm.fields import denormalized from denorm.dependencies import depend_on_related, depend_on_q __all__ = ["denormalized", "depend_on_related", "depend_on_q"]
Use __all__ to make it not overwrite .models randomly.
Use __all__ to make it not overwrite .models randomly.
Python
bsd-3-clause
heinrich5991/django-denorm,miracle2k/django-denorm,Chive/django-denorm,anentropic/django-denorm,simas/django-denorm,catalanojuan/django-denorm,victorvde/django-denorm,initcrash/django-denorm,PetrDlouhy/django-denorm,gerdemb/django-denorm,kennknowles/django-denorm,Eksmo/django-denorm,alex-mcleod/django-denorm,mjtamlyn/django-denorm,Kronuz/django-denorm,lechup/django-denorm,idahogray/django-denorm,larsbijl/django-denorm,incuna/django-denorm
from denorm.fields import denormalized - from denorm.dependencies import depend_on_related,depend_on_q + from denorm.dependencies import depend_on_related, depend_on_q + __all__ = ["denormalized", "depend_on_related", "depend_on_q"] +
Use __all__ to make it not overwrite .models randomly.
## Code Before: from denorm.fields import denormalized from denorm.dependencies import depend_on_related,depend_on_q ## Instruction: Use __all__ to make it not overwrite .models randomly. ## Code After: from denorm.fields import denormalized from denorm.dependencies import depend_on_related, depend_on_q __all__ = ["denormalized", "depend_on_related", "depend_on_q"]
from denorm.fields import denormalized - from denorm.dependencies import depend_on_related,depend_on_q + from denorm.dependencies import depend_on_related, depend_on_q ? + + + __all__ = ["denormalized", "depend_on_related", "depend_on_q"]
0668a4bba21e44a028cb008b03165f63eba5b457
acute/models.py
acute/models.py
from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True)
from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' _title = 'Seen by' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True)
Rename Clerking -> Seen by
Rename Clerking -> Seen by closes #1
Python
agpl-3.0
openhealthcare/acute,openhealthcare/acute,openhealthcare/acute
from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' + _title = 'Seen by' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True)
Rename Clerking -> Seen by
## Code Before: from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True) ## Instruction: Rename Clerking -> Seen by ## Code After: from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' _title = 'Seen by' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True)
from django.db.models import fields from opal import models class Demographics(models.Demographics): pass class Location(models.Location): pass class Allergies(models.Allergies): pass class Diagnosis(models.Diagnosis): pass class PastMedicalHistory(models.PastMedicalHistory): pass class Treatment(models.Treatment): pass class Investigation(models.Investigation): pass class Clerking(models.EpisodeSubrecord): _icon = 'fa fa-user' + _title = 'Seen by' referrer = fields.CharField(max_length=200, blank=True, null=True) clerked_by = fields.CharField(max_length=200, blank=True, null=True) consultant = fields.CharField(max_length=200, blank=True, null=True) class Plan(models.EpisodeSubrecord): _is_singleton = True _icon = 'fa fa-list-ol' plan = fields.TextField(blank=True, null=True) class Rescuscitation(models.EpisodeSubrecord): _icon = 'fa fa-warning' status = fields.CharField(max_length=200, blank=True, null=True) class NursingNotes(models.EpisodeSubrecord): _icon = 'fa fa-info-circle' notes = fields.TextField(blank=True, null=True) class DischargeDue(models.EpisodeSubrecord): _icon = 'fa fa-calendar' date = fields.DateField(blank=True, null=True)
7d79c6072482d7a2de515d7ca567225100e7b6e9
tests/test_stock.py
tests/test_stock.py
import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") try: stock.update(datetime(2014, 2, 12), price=-10) except ValueError: return self.fail("ValueError was not raised") if __name__ == "__main__": unittest.main()
import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10) if __name__ == "__main__": unittest.main()
Update negative price exception test to use assertRaises.
Update negative price exception test to use assertRaises.
Python
mit
bsmukasa/stock_alerter
import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") + self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10) - try: - stock.update(datetime(2014, 2, 12), price=-10) - except ValueError: - return - self.fail("ValueError was not raised") if __name__ == "__main__": unittest.main()
Update negative price exception test to use assertRaises.
## Code Before: import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") try: stock.update(datetime(2014, 2, 12), price=-10) except ValueError: return self.fail("ValueError was not raised") if __name__ == "__main__": unittest.main() ## Instruction: Update negative price exception test to use assertRaises. ## Code After: import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10) if __name__ == "__main__": unittest.main()
import unittest from datetime import datetime from stock import Stock class StockTest(unittest.TestCase): def test_new_stock_price(self): """A new stock should have a price that is None. """ stock = Stock("GOOG") self.assertIsNone(stock.price) def test_stock_update(self): """An update should set the price on the stock object. Notes: We will be using the `datetime` module for the timestamp. """ stock = Stock("GOOG") stock.update(datetime(2014, 2, 12), price=10) self.assertEqual(10, stock.price) def test_negative_price_exception(self): """An update with a negative price should return a value error. """ stock = Stock("GOOG") + self.assertRaises(ValueError, stock.update, datetime(2014, 2, 13), -10) - try: - stock.update(datetime(2014, 2, 12), price=-10) - except ValueError: - return - self.fail("ValueError was not raised") if __name__ == "__main__": unittest.main()
fe98703f789976df76a3275c8449d53f89a58ec1
behave_django/testcase.py
behave_django/testcase.py
from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ def runTest(*args, **kwargs): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass
from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ def runTest(self): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass
Fix Landscape complaint "Method has no argument"
Fix Landscape complaint "Method has no argument"
Python
mit
behave/behave-django,bittner/behave-django,bittner/behave-django,behave/behave-django
from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ - def runTest(*args, **kwargs): + def runTest(self): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass
Fix Landscape complaint "Method has no argument"
## Code Before: from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ def runTest(*args, **kwargs): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass ## Instruction: Fix Landscape complaint "Method has no argument" ## Code After: from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ def runTest(self): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass
from django.contrib.staticfiles.testing import StaticLiveServerTestCase class BehaviorDrivenTestCase(StaticLiveServerTestCase): """ Test case attached to the context during behave execution This test case prevents the regular tests from running. """ - def runTest(*args, **kwargs): + def runTest(self): pass class ExistingDatabaseTestCase(BehaviorDrivenTestCase): """ Test case used for the --use-existing-database setup This test case prevents fixtures from being loaded to the database in use. """ def _fixture_setup(self): pass def _fixture_teardown(self): pass
444bba442e581226b650af929c85ccc885c60297
microcosm/tracing.py
microcosm/tracing.py
from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, "logging": True, }, service_name=graph.metadata.name, ) return config.initialize_tracer()
from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed from microcosm.config.types import boolean SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, logging_enabled=typed(boolean, False), ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, "logging": graph.config.tracer.logging_enabled, }, service_name=graph.metadata.name, ) return config.initialize_tracer()
Disable jaeger logging by default
Disable jaeger logging by default
Python
apache-2.0
globality-corp/microcosm,globality-corp/microcosm
from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed + from microcosm.config.types import boolean SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, + logging_enabled=typed(boolean, False), ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, - "logging": True, + "logging": graph.config.tracer.logging_enabled, }, service_name=graph.metadata.name, ) return config.initialize_tracer()
Disable jaeger logging by default
## Code Before: from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, "logging": True, }, service_name=graph.metadata.name, ) return config.initialize_tracer() ## Instruction: Disable jaeger logging by default ## Code After: from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed from microcosm.config.types import boolean SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, logging_enabled=typed(boolean, False), ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, "logging": graph.config.tracer.logging_enabled, }, service_name=graph.metadata.name, ) return config.initialize_tracer()
from jaeger_client.config import ( DEFAULT_REPORTING_HOST, DEFAULT_REPORTING_PORT, DEFAULT_SAMPLING_PORT, Config, ) from microcosm.api import binding, defaults, typed + from microcosm.config.types import boolean SPAN_NAME = "span_name" @binding("tracer") @defaults( sample_type="ratelimiting", sample_param=typed(int, 10), sampling_port=typed(int, DEFAULT_SAMPLING_PORT), reporting_port=typed(int, DEFAULT_REPORTING_PORT), reporting_host=DEFAULT_REPORTING_HOST, + logging_enabled=typed(boolean, False), ) def configure_tracing(graph): """ See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about available sampling strategies. """ config = Config( config={ "sampler": { "type": graph.config.tracer.sample_type, "param": graph.config.tracer.sample_param, }, "local_agent": { "sampling_port": graph.config.tracer.sampling_port, "reporting_port": graph.config.tracer.reporting_port, "reporting_host": graph.config.tracer.reporting_host, }, - "logging": True, + "logging": graph.config.tracer.logging_enabled, }, service_name=graph.metadata.name, ) return config.initialize_tracer()
bb4bff73a1eefad6188f1d1544f3b4106b606d36
driller/LibcSimProc.py
driller/LibcSimProc.py
import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) _ = self.state.posix.pos(fd) data = self.state.posix.read(fd, length) self.state.store_mem(dst, data) return sym_length simprocedures = [("read", DrillerRead)]
import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) data = self.state.posix.read(fd, length, dst_addr=dst) return sym_length simprocedures = [("read", DrillerRead)]
Update libc's DrillerRead to use the new posix read calling convention to support variable read
Update libc's DrillerRead to use the new posix read calling convention to support variable read
Python
bsd-2-clause
shellphish/driller
import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) - _ = self.state.posix.pos(fd) - data = self.state.posix.read(fd, length) + data = self.state.posix.read(fd, length, dst_addr=dst) - self.state.store_mem(dst, data) return sym_length simprocedures = [("read", DrillerRead)]
Update libc's DrillerRead to use the new posix read calling convention to support variable read
## Code Before: import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) _ = self.state.posix.pos(fd) data = self.state.posix.read(fd, length) self.state.store_mem(dst, data) return sym_length simprocedures = [("read", DrillerRead)] ## Instruction: Update libc's DrillerRead to use the new posix read calling convention to support variable read ## Code After: import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) data = self.state.posix.read(fd, length, dst_addr=dst) return sym_length simprocedures = [("read", DrillerRead)]
import simuvex from simuvex.s_type import SimTypeFd, SimTypeChar, SimTypeArray, SimTypeLength class DrillerRead(simuvex.SimProcedure): ''' A custom version of read which has a symbolic return value. ''' def run(self, fd, dst, length): self.argument_types = {0: SimTypeFd(), 1: self.ty_ptr(SimTypeArray(SimTypeChar(), length)), 2: SimTypeLength(self.state.arch)} self.return_type = SimTypeLength(self.state.arch) if self.state.se.max_int(length) == 0: return self.state.se.BVV(0, self.state.arch.bits) sym_length = self.state.se.BV("sym_length", self.state.arch.bits) self.state.add_constraints(sym_length <= length) self.state.add_constraints(sym_length >= 0) - _ = self.state.posix.pos(fd) - data = self.state.posix.read(fd, length) + data = self.state.posix.read(fd, length, dst_addr=dst) ? ++++++++++++++ - self.state.store_mem(dst, data) return sym_length simprocedures = [("read", DrillerRead)]
4c0ad1cbf346c6d34a924c77081f2dd37e7f86ac
mochi/utils/pycloader.py
mochi/utils/pycloader.py
import os from mochi.core import pyc_compile_monkeypatch def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name)
import os from mochi.core import init, pyc_compile_monkeypatch def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name) init()
Fix a bug introduced by fixing a bug that always execute eventlet's monkey_patch
Fix a bug introduced by fixing a bug that always execute eventlet's monkey_patch
Python
mit
slideclick/mochi,i2y/mochi,pya/mochi,slideclick/mochi,i2y/mochi,pya/mochi
import os - from mochi.core import pyc_compile_monkeypatch + from mochi.core import init, pyc_compile_monkeypatch def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name) + init() +
Fix a bug introduced by fixing a bug that always execute eventlet's monkey_patch
## Code Before: import os from mochi.core import pyc_compile_monkeypatch def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name) ## Instruction: Fix a bug introduced by fixing a bug that always execute eventlet's monkey_patch ## Code After: import os from mochi.core import init, pyc_compile_monkeypatch def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name) init()
import os - from mochi.core import pyc_compile_monkeypatch + from mochi.core import init, pyc_compile_monkeypatch ? ++++++ def get_function(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the imported function. """ return getattr(get_module(name, file_path), name) def get_module(name, file_path): """Python function from Mochi. Compiles a Mochi file to Python bytecode and returns the Python module. """ base_path = os.path.dirname(file_path) mochi_name = os.path.join(base_path, name + '.mochi') py_name = os.path.join(base_path, name + '.pyc') pyc_compile_monkeypatch(mochi_name, py_name) return __import__(name) + + init()
1599bc03b0a1cd202836479fba2406457a17f118
user_map/tests/urls.py
user_map/tests/urls.py
from django.conf.urls import patterns, include, url urlpatterns = patterns( '', url(r'^user-map/', include('user_map.urls', namespace='user_map')) )
from django.conf.urls import patterns, include, url urlpatterns = patterns( '', url(r'^user-map/', include('user_map.urls', namespace='user_map')), url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}, name='my_login', ), )
Add login url for testing.
Add login url for testing.
Python
lgpl-2.1
akbargumbira/django-user-map,akbargumbira/django-user-map,akbargumbira/django-user-map,akbargumbira/django-user-map
from django.conf.urls import patterns, include, url urlpatterns = patterns( '', - url(r'^user-map/', include('user_map.urls', namespace='user_map')) + url(r'^user-map/', include('user_map.urls', namespace='user_map')), + url(r'^login/$', + 'django.contrib.auth.views.login', + {'template_name': 'admin/login.html'}, + name='my_login', + ), )
Add login url for testing.
## Code Before: from django.conf.urls import patterns, include, url urlpatterns = patterns( '', url(r'^user-map/', include('user_map.urls', namespace='user_map')) ) ## Instruction: Add login url for testing. ## Code After: from django.conf.urls import patterns, include, url urlpatterns = patterns( '', url(r'^user-map/', include('user_map.urls', namespace='user_map')), url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}, name='my_login', ), )
from django.conf.urls import patterns, include, url urlpatterns = patterns( '', - url(r'^user-map/', include('user_map.urls', namespace='user_map')) + url(r'^user-map/', include('user_map.urls', namespace='user_map')), ? + + url(r'^login/$', + 'django.contrib.auth.views.login', + {'template_name': 'admin/login.html'}, + name='my_login', + ), )
6d3180ffd84e126ee4441a367a48a750d270892e
sumy/document/_sentence.py
sumy/document/_sentence.py
from __future__ import absolute_import from __future__ import division, print_function, unicode_literals from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): return self._words @property def is_heading(self): return self._is_heading def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__()
from __future__ import absolute_import from __future__ import division, print_function, unicode_literals import re from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible _WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE) @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): return tuple(filter(self._is_word, self._words)) @property def is_heading(self): return self._is_heading def _is_word(self, word): return bool(_WORD_PATTERN.search(word)) def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__()
Return only alphabetic words from sentence
Return only alphabetic words from sentence
Python
apache-2.0
miso-belica/sumy,miso-belica/sumy
from __future__ import absolute_import from __future__ import division, print_function, unicode_literals + import re + from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible + + + _WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE) @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): - return self._words + return tuple(filter(self._is_word, self._words)) @property def is_heading(self): return self._is_heading + + def _is_word(self, word): + return bool(_WORD_PATTERN.search(word)) def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__()
Return only alphabetic words from sentence
## Code Before: from __future__ import absolute_import from __future__ import division, print_function, unicode_literals from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): return self._words @property def is_heading(self): return self._is_heading def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__() ## Instruction: Return only alphabetic words from sentence ## Code After: from __future__ import absolute_import from __future__ import division, print_function, unicode_literals import re from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible _WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE) @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): return tuple(filter(self._is_word, self._words)) @property def is_heading(self): return self._is_heading def _is_word(self, word): return bool(_WORD_PATTERN.search(word)) def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__()
from __future__ import absolute_import from __future__ import division, print_function, unicode_literals + import re + from itertools import chain from .._compat import to_unicode, to_string, unicode_compatible + + + _WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE) @unicode_compatible class Sentence(object): __slots__ = ("_words", "_is_heading",) def __init__(self, words, is_heading=False): self._words = tuple(map(to_unicode, words)) self._is_heading = bool(is_heading) @property def words(self): - return self._words + return tuple(filter(self._is_word, self._words)) @property def is_heading(self): return self._is_heading + + def _is_word(self, word): + return bool(_WORD_PATTERN.search(word)) def __unicode__(self): return " ".join(self._words) def __repr__(self): return to_string("<Sentence: %s>") % self.__str__()
9856361b48bb481f7913eaf69be668225c5bb818
api/files/urls.py
api/files/urls.py
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
Remove the files list endpoint
Remove the files list endpoint
Python
apache-2.0
acshi/osf.io,aaxelb/osf.io,cslzchen/osf.io,felliott/osf.io,brandonPurvis/osf.io,wearpants/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,abought/osf.io,TomBaxter/osf.io,SSJohns/osf.io,amyshi188/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,erinspace/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,cosenal/osf.io,jnayak1/osf.io,billyhunt/osf.io,wearpants/osf.io,mfraezz/osf.io,adlius/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,mluo613/osf.io,zachjanicki/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,rdhyee/osf.io,mattclark/osf.io,GageGaskins/osf.io,caneruguz/osf.io,alexschiller/osf.io,chrisseto/osf.io,crcresearch/osf.io,abought/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,alexschiller/osf.io,wearpants/osf.io,felliott/osf.io,njantrania/osf.io,chrisseto/osf.io,crcresearch/osf.io,kwierman/osf.io,amyshi188/osf.io,SSJohns/osf.io,ticklemepierce/osf.io,njantrania/osf.io,laurenrevere/osf.io,abought/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,kwierman/osf.io,Ghalko/osf.io,adlius/osf.io,emetsger/osf.io,samchrisinger/osf.io,icereval/osf.io,jnayak1/osf.io,Ghalko/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,zamattiac/osf.io,caseyrollins/osf.io,doublebits/osf.io,kch8qx/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,abought/osf.io,samanehsan/osf.io,cwisecarver/osf.io,acshi/osf.io,hmoco/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,danielneis/osf.io,mfraezz/osf.io,cosenal/osf.io,jnayak1/osf.io,leb2dg/osf.io,danielneis/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,leb2dg/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,cosenal/osf.io,mattclark/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,chennan47/osf.io,brianjgeiger/osf.io,wearpants/osf.io,erinspace/osf.io,mluo613/osf.io,danielneis/osf.io,KAsante95/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,billyhunt/osf.io,kwierman/osf.io,samchrisinger/osf.io,arpitar/osf.io,emetsger/osf.io,felliott/osf.io,samanehsan/osf.io,Nesiehr/osf.io,saradbowman/osf.io,adlius/osf.io,RomanZWang/osf.io,danielneis/osf.io,binoculars/osf.io,zamattiac/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,acshi/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,Ghalko/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,emetsger/osf.io,baylee-d/osf.io,petermalcolm/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,ticklemepierce/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,SSJohns/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,rdhyee/osf.io,icereval/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,chennan47/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,petermalcolm/osf.io,saradbowman/osf.io,hmoco/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,arpitar/osf.io,doublebits/osf.io,acshi/osf.io,KAsante95/osf.io,KAsante95/osf.io,sloria/osf.io,emetsger/osf.io,kch8qx/osf.io,hmoco/osf.io,adlius/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,hmoco/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,mfraezz/osf.io,kch8qx/osf.i
o,SSJohns/osf.io,arpitar/osf.io,TomBaxter/osf.io,aaxelb/osf.io,mfraezz/osf.io,samanehsan/osf.io,caseyrygt/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,mluo613/osf.io,caneruguz/osf.io,billyhunt/osf.io,pattisdr/osf.io,RomanZWang/osf.io,aaxelb/osf.io,samanehsan/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,binoculars/osf.io,KAsante95/osf.io,arpitar/osf.io,alexschiller/osf.io,kch8qx/osf.io,mluo613/osf.io,njantrania/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,binoculars/osf.io,mattclark/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,kch8qx/osf.io,doublebits/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,GageGaskins/osf.io,billyhunt/osf.io,sloria/osf.io,asanfilippo7/osf.io,pattisdr/osf.io,doublebits/osf.io,mluke93/osf.io,sloria/osf.io,caseyrygt/osf.io,alexschiller/osf.io,rdhyee/osf.io,rdhyee/osf.io,Nesiehr/osf.io,mluke93/osf.io,caseyrygt/osf.io,icereval/osf.io,caneruguz/osf.io
from django.conf.urls import url from api.files import views urlpatterns = [ - url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
Remove the files list endpoint
## Code Before: from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ] ## Instruction: Remove the files list endpoint ## Code After: from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
from django.conf.urls import url from api.files import views urlpatterns = [ - url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
577e2a03c15e4e489d0df4c3c2a2bea8b9aa54b6
fluent_utils/softdeps/any_urlfield.py
fluent_utils/softdeps/any_urlfield.py
from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) return name, path, args, kwargs
from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() path = "django.db.models.{}".format(models.URLField.__name__) return name, path, args, kwargs
Fix AnyUrlField migration issue on Django 1.11.
Fix AnyUrlField migration issue on Django 1.11.
Python
apache-2.0
edoburu/django-fluent-utils
from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() - path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) + path = "django.db.models.{}".format(models.URLField.__name__) return name, path, args, kwargs
Fix AnyUrlField migration issue on Django 1.11.
## Code Before: from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) return name, path, args, kwargs ## Instruction: Fix AnyUrlField migration issue on Django 1.11. ## Code After: from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() path = "django.db.models.{}".format(models.URLField.__name__) return name, path, args, kwargs
from __future__ import absolute_import from django.db import models from fluent_utils.django_compat import is_installed if is_installed('any_urlfield'): from any_urlfield.models import AnyUrlField as BaseUrlField else: BaseUrlField = models.URLField # subclassing here so South or Django migrations detect a single class. class AnyUrlField(BaseUrlField): """ A CharField that can either refer to a CMS page ID, or external URL. If *django-any-urlfield* is not installed, only regular URLs can be used. """ def __init__(self, *args, **kwargs): if 'max_length' not in kwargs: kwargs['max_length'] = 300 # Standardize super(AnyUrlField, self).__init__(*args, **kwargs) def south_field_triple(self): # Masquerade as normal URLField, so the soft-dependency also exists in the migrations. from south.modelsinspector import introspector path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) args, kwargs = introspector(self) return (path, args, kwargs) def deconstruct(self): # For Django 1.7 migrations, masquerade as normal URLField too name, path, args, kwargs = super(AnyUrlField, self).deconstruct() - path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__) + path = "django.db.models.{}".format(models.URLField.__name__) return name, path, args, kwargs
e8d5732e94d14a3a72999bd270af1fd3f3a2e09f
fileutil_posix.py
fileutil_posix.py
import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): path = os.environ.get("HOME", "") if name: path = os.path.join(path, "." + name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", )
import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): path = os.environ.get("XDG_CONFIG_HOME") if not path: path = os.path.join(os.environ.get("HOME", "/"), ".config") if name: path = os.path.join(path, name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", )
Use XDG_CONFIG_HOME for configuration directory.
Use XDG_CONFIG_HOME for configuration directory.
Python
mit
shaurz/devo
import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): - path = os.environ.get("HOME", "") + path = os.environ.get("XDG_CONFIG_HOME") + if not path: + path = os.path.join(os.environ.get("HOME", "/"), ".config") if name: - path = os.path.join(path, "." + name) + path = os.path.join(path, name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", )
Use XDG_CONFIG_HOME for configuration directory.
## Code Before: import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): path = os.environ.get("HOME", "") if name: path = os.path.join(path, "." + name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", ) ## Instruction: Use XDG_CONFIG_HOME for configuration directory. ## Code After: import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): path = os.environ.get("XDG_CONFIG_HOME") if not path: path = os.path.join(os.environ.get("HOME", "/"), ".config") if name: path = os.path.join(path, name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", )
import sys, os, subprocess def run(args, workdir=None): p = subprocess.Popen(args, close_fds=True, cwd=workdir) return p.wait() if sys.platform == "darwin": shell_open_command = "open" else: shell_open_command = "xdg-open" def shell_open(path, workdir=None): return run([shell_open_command, path], workdir=workdir) == 0 def get_user_config_dir(name=""): - path = os.environ.get("HOME", "") ? ---- + path = os.environ.get("XDG_CONFIG_HOME") ? +++++++++++ + if not path: + path = os.path.join(os.environ.get("HOME", "/"), ".config") if name: - path = os.path.join(path, "." + name) ? ------ + path = os.path.join(path, name) return os.path.realpath(path) __all__ = ( "shell_open", "get_user_config_dir", )
e8940b632737f75897c0ea7c108563a63f1a5dde
transducer/test/test_functional.py
transducer/test/test_functional.py
import unittest from transducer.functional import compose class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) if __name__ == '__main__': unittest.main()
import unittest from transducer.functional import compose, true, identity, false class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) class TestFunctions(unittest.TestCase): def test_true(self): self.assertTrue(true()) def test_false(self): self.assertFalse(false()) def test_identity(self): self.assertEqual(identity(42), 42) if __name__ == '__main__': unittest.main()
Improve test coverage of functional.py.
Improve test coverage of functional.py.
Python
mit
sixty-north/python-transducers
import unittest - from transducer.functional import compose + from transducer.functional import compose, true, identity, false class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) - def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) + class TestFunctions(unittest.TestCase): + + def test_true(self): + self.assertTrue(true()) + + def test_false(self): + self.assertFalse(false()) + + def test_identity(self): + self.assertEqual(identity(42), 42) + + if __name__ == '__main__': unittest.main()
Improve test coverage of functional.py.
## Code Before: import unittest from transducer.functional import compose class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) if __name__ == '__main__': unittest.main() ## Instruction: Improve test coverage of functional.py. ## Code After: import unittest from transducer.functional import compose, true, identity, false class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) class TestFunctions(unittest.TestCase): def test_true(self): self.assertTrue(true()) def test_false(self): self.assertFalse(false()) def test_identity(self): self.assertEqual(identity(42), 42) if __name__ == '__main__': unittest.main()
import unittest - from transducer.functional import compose + from transducer.functional import compose, true, identity, false ? +++++++++++++++++++++++ class TestComposition(unittest.TestCase): def test_single(self): """ compose(f)(x) -> f(x) """ f = lambda x: x * 2 c = compose(f) # We can't test the equivalence of functions completely, so... self.assertSequenceEqual([f(x) for x in range(1000)], [c(x) for x in range(1000)]) def test_double(self): """ compose(f, g)(x) -> f(g(x)) """ f = lambda x: x * 2 g = lambda x: x + 1 c = compose(f, g) self.assertSequenceEqual([f(g(x)) for x in range(100)], [c(x) for x in range(100)]) - def test_triple(self): """ compose(f, g, h)(x) -> f(g(h(x))) """ f = lambda x: x * 2 g = lambda x: x + 1 h = lambda x: x - 7 c = compose(f, g, h) self.assertSequenceEqual([f(g(h(x))) for x in range(100)], [c(x) for x in range(100)]) + class TestFunctions(unittest.TestCase): + + def test_true(self): + self.assertTrue(true()) + + def test_false(self): + self.assertFalse(false()) + + def test_identity(self): + self.assertEqual(identity(42), 42) + + if __name__ == '__main__': unittest.main()
edca0ed4d7a03c0cd36a0ff132d6a9b89c374203
lizard_auth_server/utils.py
lizard_auth_server/utils.py
from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', 'is_staff', 'is_superuser', ] def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length)
from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', 'is_active', 'is_staff', 'is_superuser', ] def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length)
Add is_active to the list of keys to be dumped as json
Add is_active to the list of keys to be dumped as json
Python
mit
lizardsystem/lizard-auth-server,lizardsystem/lizard-auth-server
from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server - # and lizard-auth-client! + # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', + 'is_active', 'is_staff', 'is_superuser', ] + def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) + def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length)
Add is_active to the list of keys to be dumped as json
## Code Before: from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', 'is_staff', 'is_superuser', ] def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length) ## Instruction: Add is_active to the list of keys to be dumped as json ## Code After: from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', 'is_active', 'is_staff', 'is_superuser', ] def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length)
from __future__ import unicode_literals from random import SystemRandom from django.conf import settings import string # Note: the code in this module must be identical in both lizard-auth-server - # and lizard-auth-client! ? - + # and lizard-auth-client! random = SystemRandom() KEY_CHARACTERS = string.letters + string.digits # Keys that can be directly copied from the User object and passed to the # client. SIMPLE_KEYS = [ 'pk', 'username', 'first_name', 'last_name', 'email', + 'is_active', 'is_staff', 'is_superuser', ] + def default_gen_secret_key(length=40): return ''.join([random.choice(KEY_CHARACTERS) for _ in range(length)]) + def gen_secret_key(length=40): generator = getattr(settings, 'SSO_KEYGENERATOR', default_gen_secret_key) return generator(length)
bfd8ac40bed4535a91bfd645cbe80b47c827a8de
librarian/embeds/mathml.py
librarian/embeds/mathml.py
from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) return create_embed('application/x-latex', data=six.text_type(output))
from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): """ >>> print(MathML(etree.fromstring('<mat>a &lt; b</mat>')).to_latex().data.strip()) a < b >>> print(MathML(etree.fromstring('<mat>&lt; &amp; &amp;lt; &#65;</mat>')).to_latex().data.strip()) < & &lt; A """ xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) text = six.text_type(output) # Workaround for entities being preserved in output. But there should be a better way. text = text.replace('&lt;', '<').replace('&amp;', '&') return create_embed('application/x-latex', data=text)
Fix XML entities left from MathML.
Fix XML entities left from MathML.
Python
agpl-3.0
fnp/librarian,fnp/librarian
from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): + """ + >>> print(MathML(etree.fromstring('<mat>a &lt; b</mat>')).to_latex().data.strip()) + a < b + + >>> print(MathML(etree.fromstring('<mat>&lt; &amp; &amp;lt; &#65;</mat>')).to_latex().data.strip()) + < & &lt; A + + """ xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) + text = six.text_type(output) + # Workaround for entities being preserved in output. But there should be a better way. + text = text.replace('&lt;', '<').replace('&amp;', '&') - return create_embed('application/x-latex', data=six.text_type(output)) + return create_embed('application/x-latex', data=text)
Fix XML entities left from MathML.
## Code Before: from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) return create_embed('application/x-latex', data=six.text_type(output)) ## Instruction: Fix XML entities left from MathML. ## Code After: from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): """ >>> print(MathML(etree.fromstring('<mat>a &lt; b</mat>')).to_latex().data.strip()) a < b >>> print(MathML(etree.fromstring('<mat>&lt; &amp; &amp;lt; &#65;</mat>')).to_latex().data.strip()) < & &lt; A """ xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) text = six.text_type(output) # Workaround for entities being preserved in output. But there should be a better way. text = text.replace('&lt;', '<').replace('&amp;', '&') return create_embed('application/x-latex', data=text)
from __future__ import unicode_literals from lxml import etree import six from librarian import get_resource from . import TreeEmbed, create_embed, downgrades_to class MathML(TreeEmbed): @downgrades_to('application/x-latex') def to_latex(self): + """ + >>> print(MathML(etree.fromstring('<mat>a &lt; b</mat>')).to_latex().data.strip()) + a < b + + >>> print(MathML(etree.fromstring('<mat>&lt; &amp; &amp;lt; &#65;</mat>')).to_latex().data.strip()) + < & &lt; A + + """ xslt = etree.parse(get_resource('res/embeds/mathml/mathml2latex.xslt')) output = self.tree.xslt(xslt) + text = six.text_type(output) + # Workaround for entities being preserved in output. But there should be a better way. + text = text.replace('&lt;', '<').replace('&amp;', '&') - return create_embed('application/x-latex', data=six.text_type(output)) ? ---- ------------ - + return create_embed('application/x-latex', data=text)
0b49114a6b0830fa0b05d32803ae52526b8e48ca
gnsq/backofftimer.py
gnsq/backofftimer.py
from random import randint class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 interval = randint(0, k) * self.ratio if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval
import random class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 interval = random.random() * k * self.ratio if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval
Return float for interval instead of int.
Return float for interval instead of int.
Python
bsd-3-clause
hiringsolved/gnsq,wtolson/gnsq,wtolson/gnsq
- from random import randint + import random class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 - interval = randint(0, k) * self.ratio + interval = random.random() * k * self.ratio if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval
Return float for interval instead of int.
## Code Before: from random import randint class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 interval = randint(0, k) * self.ratio if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval ## Instruction: Return float for interval instead of int. ## Code After: import random class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 interval = random.random() * k * self.ratio if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval
- from random import randint + import random class BackoffTimer(object): def __init__(self, ratio=1, max_interval=None, min_interval=None): self.c = 0 self.ratio = ratio self.max_interval = max_interval self.min_interval = min_interval def is_reset(self): return self.c == 0 def reset(self): self.c = 0 return self def success(self): self.c = max(self.c - 1, 0) return self def failure(self): self.c += 1 return self def get_interval(self): k = pow(2, self.c) - 1 - interval = randint(0, k) * self.ratio ? ^ ^ ^^ - + interval = random.random() * k * self.ratio ? ^^^^^ ^^^ ^^^ if self.max_interval is not None: interval = min(interval, self.max_interval) if self.min_interval is not None: interval = max(interval, self.min_interval) return interval
dd2c92bea635d7cfc93b437ce32266126bceb1e9
qipipe/helpers/bolus_arrival.py
qipipe/helpers/bolus_arrival.py
class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ Determines the DCE bolus arrival series index. The bolus arrival is the first series with a difference in average signal larger than double the difference from first two points. :param time_series: the 4D NiFTI scan image file path :return: the bolus arrival series index :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival")
class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ Determines the DCE bolus arrival time point index. The bolus arrival is the first occurence of a difference in average signal larger than double the difference from first two points. :param time_series: the 4D NiFTI scan image file path :return: the bolus arrival time point index :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival")
Change series to time point.
Change series to time point.
Python
bsd-2-clause
ohsu-qin/qipipe
class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ - Determines the DCE bolus arrival series index. The bolus arrival is + Determines the DCE bolus arrival time point index. The bolus arrival - the first series with a difference in average signal larger than + is the first occurence of a difference in average signal larger than double the difference from first two points. :param time_series: the 4D NiFTI scan image file path - :return: the bolus arrival series index + :return: the bolus arrival time point index :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival")
Change series to time point.
## Code Before: class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ Determines the DCE bolus arrival series index. The bolus arrival is the first series with a difference in average signal larger than double the difference from first two points. :param time_series: the 4D NiFTI scan image file path :return: the bolus arrival series index :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival") ## Instruction: Change series to time point. ## Code After: class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ Determines the DCE bolus arrival time point index. The bolus arrival is the first occurence of a difference in average signal larger than double the difference from first two points. :param time_series: the 4D NiFTI scan image file path :return: the bolus arrival time point index :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival")
class BolusArrivalError(Exception): pass def bolus_arrival_index(time_series): """ - Determines the DCE bolus arrival series index. The bolus arrival is ? ^ ^ ^^ --- + Determines the DCE bolus arrival time point index. The bolus arrival ? ^^^ ^^^ ^^ - the first series with a difference in average signal larger than ? ^ ^^ - ^^^^ + is the first occurence of a difference in average signal larger than ? +++ ^^^^^ ^^ ^^ double the difference from first two points. :param time_series: the 4D NiFTI scan image file path - :return: the bolus arrival series index ? ^ ^ ^^ + :return: the bolus arrival time point index ? ^^^ ^^^ ^^ :raise BolusArrivalError: if the bolus arrival could not be determined """ import nibabel as nb import numpy as np nii = nb.load(time_series) data = nii.get_data() n_vols = data.shape[-1] signal_means = np.array([np.mean(data[:,:,:, idx]) for idx in xrange(n_vols)]) signal_diffs = np.diff(signal_means) # If we see a difference in average signal larger than double the # difference from first two points, take that as bolus arrival. base_diff = np.abs(signal_diffs[0]) for idx, diff_val in enumerate(signal_diffs[1:]): if diff_val > 2 * base_diff: return idx + 1 else: raise BolusArrivalError("Unable to determine bolus arrival")
fd5f3875d0d7e0fdb7b7ef33a94cf50d1d2b5fa4
tests/write_to_stringio_test.py
tests/write_to_stringio_test.py
import pycurl import unittest from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode())
import pycurl import unittest import sys from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode()) @util.only_python3 def test_write_to_stringio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') # stringio in python 3 sio = util.StringIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) try: self.curl.perform() self.fail('Should have received a write error') except pycurl.error: err, msg = sys.exc_info()[1].args # we expect pycurl.E_WRITE_ERROR as the response assert pycurl.E_WRITE_ERROR == err
Add a test for writing to StringIO which is now different and does not work
Add a test for writing to StringIO which is now different and does not work
Python
lgpl-2.1
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
import pycurl import unittest + import sys from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode()) + + @util.only_python3 + def test_write_to_stringio(self): + self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') + # stringio in python 3 + sio = util.StringIO() + self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) + try: + self.curl.perform() + + self.fail('Should have received a write error') + except pycurl.error: + err, msg = sys.exc_info()[1].args + # we expect pycurl.E_WRITE_ERROR as the response + assert pycurl.E_WRITE_ERROR == err
Add a test for writing to StringIO which is now different and does not work
## Code Before: import pycurl import unittest from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode()) ## Instruction: Add a test for writing to StringIO which is now different and does not work ## Code After: import pycurl import unittest import sys from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode()) @util.only_python3 def test_write_to_stringio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') # stringio in python 3 sio = util.StringIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) try: self.curl.perform() self.fail('Should have received a write error') except pycurl.error: err, msg = sys.exc_info()[1].args # we expect pycurl.E_WRITE_ERROR as the response assert pycurl.E_WRITE_ERROR == err
import pycurl import unittest + import sys from . import appmanager from . import util setup_module, teardown_module = appmanager.setup(('app', 8380)) class WriteToStringioTest(unittest.TestCase): def setUp(self): self.curl = pycurl.Curl() def tearDown(self): self.curl.close() def test_write_to_bytesio(self): self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') sio = util.BytesIO() self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) self.curl.perform() self.assertEqual('success', sio.getvalue().decode()) + + @util.only_python3 + def test_write_to_stringio(self): + self.curl.setopt(pycurl.URL, 'http://localhost:8380/success') + # stringio in python 3 + sio = util.StringIO() + self.curl.setopt(pycurl.WRITEFUNCTION, sio.write) + try: + self.curl.perform() + + self.fail('Should have received a write error') + except pycurl.error: + err, msg = sys.exc_info()[1].args + # we expect pycurl.E_WRITE_ERROR as the response + assert pycurl.E_WRITE_ERROR == err
62ca16d355716c3baaf7a661269e54a517fef25d
tests/test_hooks.py
tests/test_hooks.py
from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: with patch("isort.hooks.SortImports", MagicMock()): hooks.git_hook(modify=True)
from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: with patch("isort.hooks.api", MagicMock()): hooks.git_hook(modify=True)
Fix mock statement for new API
Fix mock statement for new API
Python
mit
PyCQA/isort,PyCQA/isort
from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: - with patch("isort.hooks.SortImports", MagicMock()): + with patch("isort.hooks.api", MagicMock()): hooks.git_hook(modify=True)
Fix mock statement for new API
## Code Before: from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: with patch("isort.hooks.SortImports", MagicMock()): hooks.git_hook(modify=True) ## Instruction: Fix mock statement for new API ## Code After: from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: with patch("isort.hooks.api", MagicMock()): hooks.git_hook(modify=True)
from io import BytesIO from unittest.mock import MagicMock, patch from isort import hooks def test_git_hook(): """Simple smoke level testing of git hooks""" # Ensure correct subprocess command is called with patch("subprocess.run", MagicMock()) as run_mock: hooks.git_hook() assert run_mock.called_with( ["git", "diff-index", "--cached", "--name-only", "--diff-filter=ACMRTUXB HEAD"] ) # Test with incorrectly sorted file returned from git with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock: class FakeProecssResponse(object): stdout = b"import b\nimport a" with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock: - with patch("isort.hooks.SortImports", MagicMock()): ? ^^^^^^ ^^^^ + with patch("isort.hooks.api", MagicMock()): ? ^ ^ hooks.git_hook(modify=True)
79d02616ab6d70b029876b8a2de425026e6268c4
pycalc.py
pycalc.py
import sys import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
import sys if sys.version_info.major < 3: print("This program is for python version 3 only.") sys.exit(3) import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
Make main program throw warning on python2.
Make main program throw warning on python2.
Python
mit
5225225/pycalc,5225225/pycalc
+ import sys + + if sys.version_info.major < 3: + print("This program is for python version 3 only.") + sys.exit(3) import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
Make main program throw warning on python2.
## Code Before: import sys import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break ## Instruction: Make main program throw warning on python2. ## Code After: import sys if sys.version_info.major < 3: print("This program is for python version 3 only.") sys.exit(3) import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
+ import sys + + if sys.version_info.major < 3: + print("This program is for python version 3 only.") + sys.exit(3) import lexer import execute while True: instr = input("» ") toks = lexer.to_toks(instr) rpn = lexer.to_rpn(toks) result = execute.eval_rpn(rpn) if result is not None: print(result) if len(sys.argv) >= 2: break
eeecf68d2d59bc2233478b01748cbf88bab85722
setup.py
setup.py
from distutils.core import setup execfile('facebook/version.py') setup( name = 'Facebook', version = __version__, description = 'Facebook makes it even easier to interact with Facebook\'s Graph API', long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/facebook', packages = [ 'facebook' ] )
from distutils.core import setup execfile('facebook/version.py') setup( name='Facebook', version=__version__, description='Facebook makes it even easier to interact "+\ "with Facebook\'s Graph API', long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), author='Johannes Gorset', author_email='[email protected]', url='http://github.com/jgorset/facebook', requires=['facepy'], packages=[ 'facebook' ] )
Add missing requires and PEP8ize.
Add missing requires and PEP8ize.
Python
mit
jgorset/facebook,vyyvyyv/facebook,jgorset/facebook,vyyvyyv/facebook
from distutils.core import setup execfile('facebook/version.py') setup( - name = 'Facebook', + name='Facebook', - version = __version__, + version=__version__, - description = 'Facebook makes it even easier to interact with Facebook\'s Graph API', + description='Facebook makes it even easier to interact "+\ + "with Facebook\'s Graph API', - long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), + long_description=open('README.rst').read() + '\n\n' + + open('HISTORY.rst').read(), - author = 'Johannes Gorset', + author='Johannes Gorset', - author_email = '[email protected]', + author_email='[email protected]', - url = 'http://github.com/jgorset/facebook', + url='http://github.com/jgorset/facebook', + requires=['facepy'], - packages = [ + packages=[ 'facebook' ] )
Add missing requires and PEP8ize.
## Code Before: from distutils.core import setup execfile('facebook/version.py') setup( name = 'Facebook', version = __version__, description = 'Facebook makes it even easier to interact with Facebook\'s Graph API', long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/facebook', packages = [ 'facebook' ] ) ## Instruction: Add missing requires and PEP8ize. ## Code After: from distutils.core import setup execfile('facebook/version.py') setup( name='Facebook', version=__version__, description='Facebook makes it even easier to interact "+\ "with Facebook\'s Graph API', long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), author='Johannes Gorset', author_email='[email protected]', url='http://github.com/jgorset/facebook', requires=['facepy'], packages=[ 'facebook' ] )
from distutils.core import setup execfile('facebook/version.py') setup( - name = 'Facebook', ? - - + name='Facebook', - version = __version__, ? - - + version=__version__, - description = 'Facebook makes it even easier to interact with Facebook\'s Graph API', ? - - ^^^^^^^^^^^^^ -------------- + description='Facebook makes it even easier to interact "+\ ? ^^ + "with Facebook\'s Graph API', - long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), ? - - ---------------------------- + long_description=open('README.rst').read() + '\n\n' + + open('HISTORY.rst').read(), - author = 'Johannes Gorset', ? - - + author='Johannes Gorset', - author_email = '[email protected]', ? - - + author_email='[email protected]', - url = 'http://github.com/jgorset/facebook', ? - - + url='http://github.com/jgorset/facebook', + requires=['facepy'], - packages = [ ? - - + packages=[ 'facebook' ] )
b1d889dc4207af08e8c1ee3f75006fa6b4051376
vitrage/rpc.py
vitrage/rpc.py
from oslo_config import cfg import oslo_messaging as messaging OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', serializer=serializer)
from oslo_config import cfg import oslo_messaging as messaging from oslo_messaging.rpc import dispatcher OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None access_policy = dispatcher.DefaultRPCAccessPolicy return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', serializer=serializer, access_policy=access_policy)
Set access_policy for messaging's dispatcher
Set access_policy for messaging's dispatcher oslo.messaging allow dispatcher to restrict endpoint methods since 5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with DefaultRPCAccessPolicy to fix FutureWarning like: "The access_policy argument is changing its default value to <class 'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?', please update the code to explicitly set None as the value: access_policy defaults to LegacyRPCAccessPolicy which exposes private methods. Explicitly set access_policy to DefaultRPCAccessPolicy or ExplicitRPCAccessPolicy. Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab
Python
apache-2.0
openstack/vitrage,openstack/vitrage,openstack/vitrage
from oslo_config import cfg import oslo_messaging as messaging + from oslo_messaging.rpc import dispatcher OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None + access_policy = dispatcher.DefaultRPCAccessPolicy return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', - serializer=serializer) + serializer=serializer, + access_policy=access_policy)
Set access_policy for messaging's dispatcher
## Code Before: from oslo_config import cfg import oslo_messaging as messaging OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', serializer=serializer) ## Instruction: Set access_policy for messaging's dispatcher ## Code After: from oslo_config import cfg import oslo_messaging as messaging from oslo_messaging.rpc import dispatcher OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None access_policy = dispatcher.DefaultRPCAccessPolicy return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', serializer=serializer, access_policy=access_policy)
from oslo_config import cfg import oslo_messaging as messaging + from oslo_messaging.rpc import dispatcher OPTS = [ cfg.StrOpt('rpc_topic', default='rpcapiv1', help='The topic vitrage listens on'), ] def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def get_client(transport, target, version_cap=None, serializer=None): assert transport is not None return messaging.RPCClient(transport, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, transport, serializer=None): assert transport is not None + access_policy = dispatcher.DefaultRPCAccessPolicy return messaging.get_rpc_server(transport, target, endpoints, executor='eventlet', - serializer=serializer) ? ^ + serializer=serializer, ? ^ + access_policy=access_policy)
fbadf23356b40c36378cef8f3a9c8b382bce9e32
comics/core/admin.py
comics/core/admin.py
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
Include start date, end date, and active flag in comics list
Include start date, end date, and active flag in comics list
Python
agpl-3.0
jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'language', 'url', 'rights') + list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', + 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
Include start date, end date, and active flag in comics list
## Code Before: from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin) ## Instruction: Include start date, end date, and active flag in comics list ## Code After: from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'language', 'url', 'rights') ? ^ + list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', ? ^^^^^^^^^^^^^^^ + 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
7bde0ba157431311ae138acd8a2018f85d8af91d
test_data.py
test_data.py
def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format())
def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass def f6( *, d ): pass f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format())
Add a test for functions with keyword only arguments
Add a test for functions with keyword only arguments

This adds a test to ensure that no error is raised if a trailing comma is missing from a function definition that has keyword only arguments.

Reviewed-by: Jakub Stasiak <[email protected]>
Python
mit
smarkets/flake8-strict
def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass + def f6( + *, + d + ): + pass + f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format())
Add a test for functions with keyword only arguments
## Code Before: def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format()) ## Instruction: Add a test for functions with keyword only arguments ## Code After: def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass def f6( *, d ): pass f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format())
def f1(a, # S100 b): # S101 pass def f2( a, b # S101 ): pass def f3( a, b, ): pass # trailing comma after *args or **kwargs is a syntax error therefore # we don't want to enforce it such situations def f4( a, *args ): pass def f5( b, **kwargs ): pass + def f6( + *, + d + ): + pass + f3(1, # S100 2) # S101 f3( 1, 2) # S101 f3( 1, 2 # S101 ) f3( 1, 2, ) kwargs = {} f5('-o', # S100 some_keyword_argument='./') # S101 f5( b='something', ) ( ''. format())
83919e74b7d20688811a4f782d4fccaf3bc3c055
comics/comics/hijinksensue.py
comics/comics/hijinksensue.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase


class ComicData(ComicDataBase):
    name = "HijiNKS Ensue"
    language = "en"
    url = "http://hijinksensue.com/"
    start_date = "2007-05-11"
    rights = "Joel Watson"


class Crawler(CrawlerBase):
    history_capable_days = 180
    time_zone = "US/Central"

    def crawl(self, pub_date):
        feed = self.parse_feed("http://hijinksensue.com/feed/")
        for entry in feed.for_date(pub_date):
            if "/comic/" not in entry.link:
                continue
            url = entry.content0.src('img[src*="-300x120"]')
            if not url:
                continue
            url = url.replace("-300x120", "")
            title = entry.title
            return CrawlerImage(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase


class ComicData(ComicDataBase):
    name = "HijiNKS Ensue"
    language = "en"
    url = "http://hijinksensue.com/"
    start_date = "2007-05-11"
    rights = "Joel Watson"
    active = False


class Crawler(CrawlerBase):
    history_capable_date = '2015-03-11'
    time_zone = "US/Central"

    def crawl(self, pub_date):
        feed = self.parse_feed("http://hijinksensue.com/feed/")
        for entry in feed.for_date(pub_date):
            if "/comic/" not in entry.link:
                continue
            url = entry.content0.src('img[srcset*="-300x120"]')
            if not url:
                continue
            url = url.replace("-300x120", "")
            title = entry.title
            return CrawlerImage(url, title)
Update "HijiNKS Ensue" after feed change
Update "HijiNKS Ensue" after feed change
Python
agpl-3.0
datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "HijiNKS Ensue" language = "en" url = "http://hijinksensue.com/" start_date = "2007-05-11" rights = "Joel Watson" + active = False class Crawler(CrawlerBase): - history_capable_days = 180 + history_capable_date = '2015-03-11' time_zone = "US/Central" def crawl(self, pub_date): feed = self.parse_feed("http://hijinksensue.com/feed/") for entry in feed.for_date(pub_date): if "/comic/" not in entry.link: continue - url = entry.content0.src('img[src*="-300x120"]') + url = entry.content0.src('img[srcset*="-300x120"]') if not url: continue url = url.replace("-300x120", "") title = entry.title return CrawlerImage(url, title)
Update "HijiNKS Ensue" after feed change
## Code Before: from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "HijiNKS Ensue" language = "en" url = "http://hijinksensue.com/" start_date = "2007-05-11" rights = "Joel Watson" class Crawler(CrawlerBase): history_capable_days = 180 time_zone = "US/Central" def crawl(self, pub_date): feed = self.parse_feed("http://hijinksensue.com/feed/") for entry in feed.for_date(pub_date): if "/comic/" not in entry.link: continue url = entry.content0.src('img[src*="-300x120"]') if not url: continue url = url.replace("-300x120", "") title = entry.title return CrawlerImage(url, title) ## Instruction: Update "HijiNKS Ensue" after feed change ## Code After: from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "HijiNKS Ensue" language = "en" url = "http://hijinksensue.com/" start_date = "2007-05-11" rights = "Joel Watson" active = False class Crawler(CrawlerBase): history_capable_date = '2015-03-11' time_zone = "US/Central" def crawl(self, pub_date): feed = self.parse_feed("http://hijinksensue.com/feed/") for entry in feed.for_date(pub_date): if "/comic/" not in entry.link: continue url = entry.content0.src('img[srcset*="-300x120"]') if not url: continue url = url.replace("-300x120", "") title = entry.title return CrawlerImage(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "HijiNKS Ensue" language = "en" url = "http://hijinksensue.com/" start_date = "2007-05-11" rights = "Joel Watson" + active = False class Crawler(CrawlerBase): - history_capable_days = 180 ? ^^ ^ + history_capable_date = '2015-03-11' ? ^^ +++ ^^ +++++ time_zone = "US/Central" def crawl(self, pub_date): feed = self.parse_feed("http://hijinksensue.com/feed/") for entry in feed.for_date(pub_date): if "/comic/" not in entry.link: continue - url = entry.content0.src('img[src*="-300x120"]') + url = entry.content0.src('img[srcset*="-300x120"]') ? +++ if not url: continue url = url.replace("-300x120", "") title = entry.title return CrawlerImage(url, title)
e07b2e24cddc8a2e2d1c8838e8509b2009344714
util/BaseModel.py
util/BaseModel.py
from google.appengine.ext import ndb


class BaseModel(ndb.Model):
    date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
    date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
from google.appengine.ext import ndb


class BaseModel(ndb.Model):
    date_created = ndb.DateTimeProperty(auto_now_add=True, required=True)
    date_modified = ndb.DateTimeProperty(auto_now=True, required=True)

    @classmethod
    def from_urlsafe(cls, urlsafe):
        key = ndb.Key(urlsafe=urlsafe)
        obj = key.get()
        if obj and isinstance(obj, cls):
            return obj
Add a utility method to get instances from urlsafe key.
Add a utility method to get instances from urlsafe key.
Python
apache-2.0
kkinder/GAEStarterKit,kkinder/GAEStarterKit,kkinder/GAEStarterKit
from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) + @classmethod + def from_urlsafe(cls, urlsafe): + key = ndb.Key(urlsafe=urlsafe) + obj = key.get() + if obj and isinstance(obj, cls): + return obj +
Add a utility method to get instances from urlsafe key.
## Code Before: from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) ## Instruction: Add a utility method to get instances from urlsafe key. ## Code After: from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) + + @classmethod + def from_urlsafe(cls, urlsafe): + key = ndb.Key(urlsafe=urlsafe) + obj = key.get() + if obj and isinstance(obj, cls): + return obj
00ae10769d95445b80be0e8d129fbc76b63aca5a
flexget/utils/soup.py
flexget/utils/soup.py
import html5lib
from html5lib import treebuilders
from cStringIO import StringIO


def get_soup(obj):
    if isinstance(obj, basestring):
        obj = StringIO(obj)
    parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder('beautifulsoup'))
    return parser.parse(obj)
import html5lib
from html5lib import treebuilders
from cStringIO import StringIO

# Hack, hide DataLossWarnings
# Based on html5lib code namespaceHTMLElements=False should do it, but nope ...
import warnings
from html5lib.constants import DataLossWarning
warnings.simplefilter('ignore', DataLossWarning)


def get_soup(obj):
    if isinstance(obj, basestring):
        obj = StringIO(obj)
    parser = html5lib.HTMLParser(namespaceHTMLElements=False, tree=treebuilders.getTreeBuilder('beautifulsoup'))
    return parser.parse(obj)
Hide DataLossWarnings that appeared with html5lib 0.90 or so.
Hide DataLossWarnings that appeared with html5lib 0.90 or so. git-svn-id: ad91b9aa7ba7638d69f912c9f5d012e3326e9f74@1124 3942dd89-8c5d-46d7-aeed-044bccf3e60c
Python
mit
qk4l/Flexget,sean797/Flexget,ZefQ/Flexget,qk4l/Flexget,tobinjt/Flexget,grrr2/Flexget,jacobmetrick/Flexget,oxc/Flexget,camon/Flexget,crawln45/Flexget,xfouloux/Flexget,thalamus/Flexget,JorisDeRieck/Flexget,vfrc2/Flexget,patsissons/Flexget,cvium/Flexget,ianstalk/Flexget,tvcsantos/Flexget,jawilson/Flexget,OmgOhnoes/Flexget,v17al/Flexget,tvcsantos/Flexget,thalamus/Flexget,xfouloux/Flexget,cvium/Flexget,crawln45/Flexget,tobinjt/Flexget,vfrc2/Flexget,gazpachoking/Flexget,Pretagonist/Flexget,ibrahimkarahan/Flexget,Pretagonist/Flexget,dsemi/Flexget,Danfocus/Flexget,tsnoam/Flexget,lildadou/Flexget,Flexget/Flexget,Danfocus/Flexget,tobinjt/Flexget,ratoaq2/Flexget,camon/Flexget,tobinjt/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,poulpito/Flexget,v17al/Flexget,lildadou/Flexget,tarzasai/Flexget,offbyone/Flexget,jawilson/Flexget,Pretagonist/Flexget,ianstalk/Flexget,cvium/Flexget,v17al/Flexget,qvazzler/Flexget,X-dark/Flexget,ianstalk/Flexget,vfrc2/Flexget,malkavi/Flexget,sean797/Flexget,spencerjanssen/Flexget,tsnoam/Flexget,JorisDeRieck/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,antivirtel/Flexget,voriux/Flexget,ratoaq2/Flexget,lildadou/Flexget,drwyrm/Flexget,ZefQ/Flexget,ibrahimkarahan/Flexget,OmgOhnoes/Flexget,spencerjanssen/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,jacobmetrick/Flexget,drwyrm/Flexget,asm0dey/Flexget,malkavi/Flexget,offbyone/Flexget,offbyone/Flexget,poulpito/Flexget,qk4l/Flexget,asm0dey/Flexget,Flexget/Flexget,X-dark/Flexget,xfouloux/Flexget,voriux/Flexget,tarzasai/Flexget,crawln45/Flexget,ZefQ/Flexget,Flexget/Flexget,Danfocus/Flexget,thalamus/Flexget,drwyrm/Flexget,qvazzler/Flexget,crawln45/Flexget,X-dark/Flexget,tsnoam/Flexget,sean797/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,dsemi/Flexget,dsemi/Flexget,grrr2/Flexget,patsissons/Flexget,spencerjanssen/Flexget,jacobmetrick/Flexget,malkavi/Flexget,LynxyssCZ/Flexget,tarzasai/Flexget,ibrahimkarahan/Flexget,oxc/Flexget,ratoaq2/Flexget,jawilson/Flexget,poulpito/Flexget,gazpachoking/Flexget,malkavi/Flexget,antivirtel/Flexget,asm0dey/Flexget,Flexget/Flexget,oxc/Flexget,grrr2/Flexget,patsissons/Flexget,LynxyssCZ/Flexget,jawilson/Flexget
import html5lib from html5lib import treebuilders from cStringIO import StringIO + # Hack, hide DataLossWarnings + # Based on html5lib code namespaceHTMLElements=False should do it, but nope ... + import warnings + from html5lib.constants import DataLossWarning + warnings.simplefilter('ignore', DataLossWarning) + + def get_soup(obj): if isinstance(obj, basestring): obj = StringIO(obj) - parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder('beautifulsoup')) + parser = html5lib.HTMLParser(namespaceHTMLElements=False, tree=treebuilders.getTreeBuilder('beautifulsoup')) return parser.parse(obj)
Hide DataLossWarnings that appeared with html5lib 0.90 or so.
## Code Before: import html5lib from html5lib import treebuilders from cStringIO import StringIO def get_soup(obj): if isinstance(obj, basestring): obj = StringIO(obj) parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder('beautifulsoup')) return parser.parse(obj) ## Instruction: Hide DataLossWarnings that appeared with html5lib 0.90 or so. ## Code After: import html5lib from html5lib import treebuilders from cStringIO import StringIO # Hack, hide DataLossWarnings # Based on html5lib code namespaceHTMLElements=False should do it, but nope ... import warnings from html5lib.constants import DataLossWarning warnings.simplefilter('ignore', DataLossWarning) def get_soup(obj): if isinstance(obj, basestring): obj = StringIO(obj) parser = html5lib.HTMLParser(namespaceHTMLElements=False, tree=treebuilders.getTreeBuilder('beautifulsoup')) return parser.parse(obj)
import html5lib from html5lib import treebuilders from cStringIO import StringIO + # Hack, hide DataLossWarnings + # Based on html5lib code namespaceHTMLElements=False should do it, but nope ... + import warnings + from html5lib.constants import DataLossWarning + warnings.simplefilter('ignore', DataLossWarning) + + def get_soup(obj): if isinstance(obj, basestring): obj = StringIO(obj) - parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder('beautifulsoup')) + parser = html5lib.HTMLParser(namespaceHTMLElements=False, tree=treebuilders.getTreeBuilder('beautifulsoup')) ? +++++++++++++++++++++++++++++ return parser.parse(obj)
852f067c7aab6bdcaabf2550fc5a0995a7e9b0ae
maediprojects/__init__.py
maediprojects/__init__.py
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.babel import Babel
import os

app = Flask(__name__.split('.')[0])
app.config.from_pyfile(os.path.join('..', 'config.py'))
db = SQLAlchemy(app)
babel = Babel(app)

import routes


@babel.localeselector
def get_locale():
    return app.config["BABEL_DEFAULT_LOCALE"]
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.babel import Babel
from flask.ext.mail import Mail
import os

app = Flask(__name__.split('.')[0])
app.config.from_pyfile(os.path.join('..', 'config.py'))
db = SQLAlchemy(app)
babel = Babel(app)
mail = Mail(app)

import routes


@babel.localeselector
def get_locale():
    return app.config["BABEL_DEFAULT_LOCALE"]
Add flask-mail to use for emailing updates
Add flask-mail to use for emailing updates
Python
agpl-3.0
markbrough/maedi-projects,markbrough/maedi-projects,markbrough/maedi-projects
from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.babel import Babel + from flask.ext.mail import Mail import os app = Flask(__name__.split('.')[0]) app.config.from_pyfile(os.path.join('..', 'config.py')) db = SQLAlchemy(app) babel = Babel(app) + mail = Mail(app) import routes @babel.localeselector def get_locale(): return app.config["BABEL_DEFAULT_LOCALE"]
Add flask-mail to use for emailing updates
## Code Before: from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.babel import Babel import os app = Flask(__name__.split('.')[0]) app.config.from_pyfile(os.path.join('..', 'config.py')) db = SQLAlchemy(app) babel = Babel(app) import routes @babel.localeselector def get_locale(): return app.config["BABEL_DEFAULT_LOCALE"] ## Instruction: Add flask-mail to use for emailing updates ## Code After: from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.babel import Babel from flask.ext.mail import Mail import os app = Flask(__name__.split('.')[0]) app.config.from_pyfile(os.path.join('..', 'config.py')) db = SQLAlchemy(app) babel = Babel(app) mail = Mail(app) import routes @babel.localeselector def get_locale(): return app.config["BABEL_DEFAULT_LOCALE"]
from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.babel import Babel + from flask.ext.mail import Mail import os app = Flask(__name__.split('.')[0]) app.config.from_pyfile(os.path.join('..', 'config.py')) db = SQLAlchemy(app) babel = Babel(app) + mail = Mail(app) import routes @babel.localeselector def get_locale(): return app.config["BABEL_DEFAULT_LOCALE"]
547e2cbddd26f2e158fbbdab8ae22605cbd270c9
joby/items.py
joby/items.py
from scrapy import Item, Field


class JobItem(Item):
    website_url = Field()
    website_language = Field()
    publication_date = Field()
    posting_id = Field()
    url = Field()
    number_of_views = Field()
    contact_email = Field()
    contact_name = Field()
    employment_type = Field()
    workload = Field()
    duration = Field()
    remote = Field()
    title = Field(primary_key=True)
    keywords = Field()
    abstract = Field()
    description = Field()
    salary = Field()
    level = Field()
    responsabilities = Field()
    required_skills = Field()
    required_languages = Field()
    company = Field(primary_key=True)
    city = Field()
    country = Field()
    postal_code = Field()
    company_website = Field()
    company_category = Field()
    company_description = Field()
    start_date = Field()
    end_date = Field()


class DataScienceJobsJobItem(Job):
    pass
from scrapy import Item, Field
from scrapy.loader import Identity, ItemLoader
from scrapy.loader.processors import TakeFirst


class JobItem(Item):
    website_url = Field()
    website_language = Field()
    publication_date = Field()
    posting_id = Field()
    url = Field()
    number_of_views = Field()
    contact_email = Field()
    contact_name = Field()
    employment_type = Field()
    workload = Field()
    duration = Field()
    remote = Field()
    title = Field(primary_key=True)
    keywords = Field()
    abstract = Field()
    description = Field()
    salary = Field()
    level = Field()
    responsabilities = Field()
    required_skills = Field()
    required_languages = Field()
    company = Field(primary_key=True)
    city = Field()
    country = Field()
    postal_code = Field()
    company_website = Field()
    company_category = Field()
    company_description = Field()
    start_date = Field()
    end_date = Field()


class DataScienceJobsJobItem(JobItem):
    pass


class JobItemLoader(ItemLoader):
    default_input_processor = Identity()
    default_output_processor = TakeFirst()


class DataScienceJobsItemLoader(JobItemLoader):
    pass
Add JobItemLoader and DataScienceJobsItemLoader class.
Add JobItemLoader and DataScienceJobsItemLoader class.
Python
mit
cyberbikepunk/job-spiders
from scrapy import Item, Field + from scrapy.loader import Identity, ItemLoader + from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() - class DataScienceJobsJobItem(Job): + class DataScienceJobsJobItem(JobItem): pass + class JobItemLoader(ItemLoader): + default_input_processor = Identity() + default_output_processor = TakeFirst() + class DataScienceJobsItemLoader(JobItemLoader): + pass +
Add JobItemLoader and DataScienceJobsItemLoader class.
## Code Before: from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass ## Instruction: Add JobItemLoader and DataScienceJobsItemLoader class. ## Code After: from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
from scrapy import Item, Field + from scrapy.loader import Identity, ItemLoader + from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() - class DataScienceJobsJobItem(Job): + class DataScienceJobsJobItem(JobItem): ? ++++ pass + class JobItemLoader(ItemLoader): + default_input_processor = Identity() + default_output_processor = TakeFirst() + + class DataScienceJobsItemLoader(JobItemLoader): + pass
df25af8c12f824ee46a7bbf676f9adfcef5b1624
grazer/run.py
grazer/run.py
import click

from dotenv import load_dotenv, find_dotenv

from grazer.config import Config
from grazer.core import crawler


@click.command()
@click.option("--env", default=find_dotenv())
@click.option("--config")
def main(env, config):
    load_dotenv(env)
    cfg = Config(config)
    for record, link in crawler.create(cfg):
        print(record)


if __name__ == "__main__":
    main()
import click
import logging

from dotenv import load_dotenv, find_dotenv

from grazer.config import Config
from grazer.core import crawler


@click.command()
@click.option("--env", default=find_dotenv())
@click.option("--config")
@click.option("--log_level", default="INFO")
def main(env, config, log_level):
    logging.basicConfig(level=getattr(logging, log_level))
    load_dotenv(env)
    cfg = Config(config)
    for record, link in crawler.create(cfg):
        print(record)


if __name__ == "__main__":
    main()
Allow to config log level
Allow to config log level
Python
mit
CodersOfTheNight/verata
import click + import logging from dotenv import load_dotenv, find_dotenv from grazer.config import Config from grazer.core import crawler @click.command() @click.option("--env", default=find_dotenv()) @click.option("--config") + @click.option("--log_level", default="INFO") - def main(env, config): + def main(env, config, log_level): + logging.basicConfig(level=getattr(logging, log_level)) load_dotenv(env) cfg = Config(config) for record, link in crawler.create(cfg): print(record) if __name__ == "__main__": main()
Allow to config log level
## Code Before: import click from dotenv import load_dotenv, find_dotenv from grazer.config import Config from grazer.core import crawler @click.command() @click.option("--env", default=find_dotenv()) @click.option("--config") def main(env, config): load_dotenv(env) cfg = Config(config) for record, link in crawler.create(cfg): print(record) if __name__ == "__main__": main() ## Instruction: Allow to config log level ## Code After: import click import logging from dotenv import load_dotenv, find_dotenv from grazer.config import Config from grazer.core import crawler @click.command() @click.option("--env", default=find_dotenv()) @click.option("--config") @click.option("--log_level", default="INFO") def main(env, config, log_level): logging.basicConfig(level=getattr(logging, log_level)) load_dotenv(env) cfg = Config(config) for record, link in crawler.create(cfg): print(record) if __name__ == "__main__": main()
import click + import logging from dotenv import load_dotenv, find_dotenv from grazer.config import Config from grazer.core import crawler @click.command() @click.option("--env", default=find_dotenv()) @click.option("--config") + @click.option("--log_level", default="INFO") - def main(env, config): + def main(env, config, log_level): ? +++++++++++ + logging.basicConfig(level=getattr(logging, log_level)) load_dotenv(env) cfg = Config(config) for record, link in crawler.create(cfg): print(record) if __name__ == "__main__": main()
35d5dc83d8abc4e33988ebf26a6fafadf9b815d4
fontGenerator/util.py
fontGenerator/util.py
import base64, os


def get_content(texts):
    if isinstance(texts, str) or isinstance(texts, unicode):
        file_path = texts
        with open(file_path, 'r') as f:
            return list(f.read().decode("utf-8"))
            f.close()
    else:
        return texts


def write_file( path, data ):
    with open( path, "w" ) as f:
        f.write(data)
        f.close()


def read_by_base64(path):
    f = open(path, "r")
    data = f.read()
    f.close()
    return base64.b64encode(data)


def delete_file(path):
    os.remove(path)


def delete_files(paths):
    for path in paths:
        delete_file(path)
import base64, os


def get_content(texts):
    if isinstance(texts, str) or isinstance(texts, unicode):
        file_path = texts
        with open(file_path, 'r') as f:
            return list(f.read().decode("utf-8"))
            f.close()
    else:
        return texts


def write_file( path, data ):
    with open( path, "w" ) as f:
        f.write(data)
        f.close()


def read_by_base64(path):
    with open(path, "r") as f:
        data = f.read()
        f.close()
    return base64.b64encode(data)


def delete_file(path):
    os.remove(path)


def delete_files(paths):
    for path in paths:
        delete_file(path)
Use `with` syntax to open file
Use `with` syntax to open file
Python
mit
eHanlin/font-generator,eHanlin/font-generator
import base64, os def get_content(texts): if isinstance(texts, str) or isinstance(texts, unicode): file_path = texts with open(file_path, 'r') as f: return list(f.read().decode("utf-8")) f.close() else: return texts def write_file( path, data ): with open( path, "w" ) as f: f.write(data) f.close() def read_by_base64(path): - f = open(path, "r") + with open(path, "r") as f: - data = f.read() + data = f.read() - f.close() + f.close() return base64.b64encode(data) def delete_file(path): os.remove(path) def delete_files(paths): for path in paths: delete_file(path)
Use `with` syntax to open file
## Code Before: import base64, os def get_content(texts): if isinstance(texts, str) or isinstance(texts, unicode): file_path = texts with open(file_path, 'r') as f: return list(f.read().decode("utf-8")) f.close() else: return texts def write_file( path, data ): with open( path, "w" ) as f: f.write(data) f.close() def read_by_base64(path): f = open(path, "r") data = f.read() f.close() return base64.b64encode(data) def delete_file(path): os.remove(path) def delete_files(paths): for path in paths: delete_file(path) ## Instruction: Use `with` syntax to open file ## Code After: import base64, os def get_content(texts): if isinstance(texts, str) or isinstance(texts, unicode): file_path = texts with open(file_path, 'r') as f: return list(f.read().decode("utf-8")) f.close() else: return texts def write_file( path, data ): with open( path, "w" ) as f: f.write(data) f.close() def read_by_base64(path): with open(path, "r") as f: data = f.read() f.close() return base64.b64encode(data) def delete_file(path): os.remove(path) def delete_files(paths): for path in paths: delete_file(path)
import base64, os def get_content(texts): if isinstance(texts, str) or isinstance(texts, unicode): file_path = texts with open(file_path, 'r') as f: return list(f.read().decode("utf-8")) f.close() else: return texts def write_file( path, data ): with open( path, "w" ) as f: f.write(data) f.close() def read_by_base64(path): - f = open(path, "r") ? ^^^ + with open(path, "r") as f: ? ^^^^ ++++++ - data = f.read() + data = f.read() ? ++++ - f.close() + f.close() ? ++++ return base64.b64encode(data) def delete_file(path): os.remove(path) def delete_files(paths): for path in paths: delete_file(path)
414c8fa0a5576645831d58c8fa1285c9aef3610d
conditional/blueprints/intro_evals.py
conditional/blueprints/intro_evals.py
from flask import Blueprint
from flask import render_template
from flask import request

intro_evals_bp = Blueprint('intro_evals_bp', __name__)


@intro_evals_bp.route('/intro_evals/')
def display_intro_evals():
    # get user data
    user_name = request.headers.get('x-webauth-user')

    members = [
        {
            'name': "Liam Middlebrook",
            'packet_due': '2015-12-23',
            'eval_date': '2016-02-13',
            'signatures_missed': 3,
            'committee_meetings': 24,
            'committee_meetings_passed': False,
            'house_meetings_missed': 0,
            'house_meetings_comments': "",
            'technical_seminars': "Seminar 1\nSeminar 2",
            'techincal_seminars_passed': True,
            'social_events': "",
            'freshmen_project': False,
            'comments': "please don't fail me",
            'result': 'Pending'
        }
    ]

    # return names in 'first last (username)' format
    return render_template('intro_evals.html',
                           username = user_name,
                           members = members)
from flask import Blueprint
from flask import render_template
from flask import request

intro_evals_bp = Blueprint('intro_evals_bp', __name__)


@intro_evals_bp.route('/intro_evals/')
def display_intro_evals():
    # get user data
    user_name = request.headers.get('x-webauth-user')

    members = [
        {
            'name': "Liam Middlebrook",
            'packet_due': '2015-12-23',
            'eval_date': '2016-02-13',
            'signatures_missed': 3,
            'committee_meetings': 24,
            'committee_meetings_passed': False,
            'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
            'technical_seminars': [{'date': "halloween", 'name': 'how to play videogames with liam'}],
            'social_events': "",
            'freshmen_project': False,
            'comments': "please don't fail me",
            'result': 'Pending'
        }
    ]

    # return names in 'first last (username)' format
    return render_template('intro_evals.html',
                           username = user_name,
                           members = members)
Edit intro evals data route
Edit intro evals data route
Python
mit
RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,RamZallan/conditional,ComputerScienceHouse/conditional,ComputerScienceHouse/conditional
from flask import Blueprint from flask import render_template from flask import request intro_evals_bp = Blueprint('intro_evals_bp', __name__) @intro_evals_bp.route('/intro_evals/') def display_intro_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'packet_due': '2015-12-23', 'eval_date': '2016-02-13', 'signatures_missed': 3, 'committee_meetings': 24, 'committee_meetings_passed': False, + 'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}], + 'technical_seminars': [{'date': "halloween", 'name': 'how to play videogames with liam'}], - 'house_meetings_missed': 0, - 'house_meetings_comments': "", - 'technical_seminars': "Seminar 1\nSeminar 2", - 'techincal_seminars_passed': True, 'social_events': "", 'freshmen_project': False, 'comments': "please don't fail me", 'result': 'Pending' } ] # return names in 'first last (username)' format return render_template('intro_evals.html', username = user_name, members = members)
Edit intro evals data route
## Code Before: from flask import Blueprint from flask import render_template from flask import request intro_evals_bp = Blueprint('intro_evals_bp', __name__) @intro_evals_bp.route('/intro_evals/') def display_intro_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'packet_due': '2015-12-23', 'eval_date': '2016-02-13', 'signatures_missed': 3, 'committee_meetings': 24, 'committee_meetings_passed': False, 'house_meetings_missed': 0, 'house_meetings_comments': "", 'technical_seminars': "Seminar 1\nSeminar 2", 'techincal_seminars_passed': True, 'social_events': "", 'freshmen_project': False, 'comments': "please don't fail me", 'result': 'Pending' } ] # return names in 'first last (username)' format return render_template('intro_evals.html', username = user_name, members = members) ## Instruction: Edit intro evals data route ## Code After: from flask import Blueprint from flask import render_template from flask import request intro_evals_bp = Blueprint('intro_evals_bp', __name__) @intro_evals_bp.route('/intro_evals/') def display_intro_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'packet_due': '2015-12-23', 'eval_date': '2016-02-13', 'signatures_missed': 3, 'committee_meetings': 24, 'committee_meetings_passed': False, 'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}], 'technical_seminars': [{'date': "halloween", 'name': 'how to play videogames with liam'}], 'social_events': "", 'freshmen_project': False, 'comments': "please don't fail me", 'result': 'Pending' } ] # return names in 'first last (username)' format return render_template('intro_evals.html', username = user_name, members = members)
from flask import Blueprint from flask import render_template from flask import request intro_evals_bp = Blueprint('intro_evals_bp', __name__) @intro_evals_bp.route('/intro_evals/') def display_intro_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'packet_due': '2015-12-23', 'eval_date': '2016-02-13', 'signatures_missed': 3, 'committee_meetings': 24, 'committee_meetings_passed': False, + 'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}], + 'technical_seminars': [{'date': "halloween", 'name': 'how to play videogames with liam'}], - 'house_meetings_missed': 0, - 'house_meetings_comments': "", - 'technical_seminars': "Seminar 1\nSeminar 2", - 'techincal_seminars_passed': True, 'social_events': "", 'freshmen_project': False, 'comments': "please don't fail me", 'result': 'Pending' } ] # return names in 'first last (username)' format return render_template('intro_evals.html', username = user_name, members = members)
e98f9fcc8537835b5a00bd0b6a755d7980a197de
template_tests/tests.py
template_tests/tests.py
import re
import os

from django.test import TestCase

from .utils import get_template_dirs

re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"')


class TestTemplates(TestCase):
    def assertValidURLs(self, filename):
        with open(filename) as f:
            urls = [m.group('url') for m in re_url.finditer(f.read())]

        self.failIf(urls, "%s contains hardcoded URLs: %r" % (
            filename,
            urls,
        ))

    idx = 0
    for x in get_template_dirs():
        for root, _, filenames in os.walk(x):
            for y in filenames:
                def wrapper(self, filename=os.path.join(root, y)):
                    self.assertValidURLs(filename)
                idx += 1
                locals()['test_template_idx_%04d' % idx] = wrapper
import re
import os

from django.test import TestCase
from django.utils.text import slugify

from .utils import get_template_dirs

re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"')


class TestTemplatesMeta(type):
    def __new__(cls, name, bases, attrs):
        def generate(template):
            def fn(self):
                self.assertValidURLs(template)
            return fn

        for x in get_template_dirs():
            for root, _, templates in os.walk(x):
                for y in templates:
                    template = os.path.join(root, y)

                    attrs['test_%s' % slugify(template)] = generate(template)

        return super(TestTemplatesMeta, cls).__new__(cls, name, bases, attrs)


class TestTemplates(TestCase):
    __metaclass__ = TestTemplatesMeta

    def assertValidURLs(self, template):
        with open(template) as f:
            urls = [m.group('url') for m in re_url.finditer(f.read())]

        self.failIf(urls, "%s contains hardcoded URLs: %r" % (
            template,
            urls,
        ))
Use a metaclass instead of dirty dict()-mangling.
Use a metaclass instead of dirty dict()-mangling.
Python
bsd-3-clause
lamby/django-template-tests
import re import os from django.test import TestCase + from django.utils.text import slugify from .utils import get_template_dirs re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"') + class TestTemplatesMeta(type): + def __new__(cls, name, bases, attrs): + def generate(template): + def fn(self): + self.assertValidURLs(template) + return fn + + for x in get_template_dirs(): + for root, _, templates in os.walk(x): + for y in templates: + template = os.path.join(root, y) + + attrs['test_%s' % slugify(template)] = generate(template) + + return super(TestTemplatesMeta, cls).__new__(cls, name, bases, attrs) + class TestTemplates(TestCase): + __metaclass__ = TestTemplatesMeta + - def assertValidURLs(self, filename): + def assertValidURLs(self, template): - with open(filename) as f: + with open(template) as f: urls = [m.group('url') for m in re_url.finditer(f.read())] self.failIf(urls, "%s contains hardcoded URLs: %r" % ( - filename, + template, urls, )) - idx = 0 - for x in get_template_dirs(): - for root, _, filenames in os.walk(x): - for y in filenames: - def wrapper(self, filename=os.path.join(root, y)): - self.assertValidURLs(filename) - idx += 1 - locals()['test_template_idx_%04d' % idx] = wrapper -
Use a metaclass instead of dirty dict()-mangling.
## Code Before: import re import os from django.test import TestCase from .utils import get_template_dirs re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"') class TestTemplates(TestCase): def assertValidURLs(self, filename): with open(filename) as f: urls = [m.group('url') for m in re_url.finditer(f.read())] self.failIf(urls, "%s contains hardcoded URLs: %r" % ( filename, urls, )) idx = 0 for x in get_template_dirs(): for root, _, filenames in os.walk(x): for y in filenames: def wrapper(self, filename=os.path.join(root, y)): self.assertValidURLs(filename) idx += 1 locals()['test_template_idx_%04d' % idx] = wrapper ## Instruction: Use a metaclass instead of dirty dict()-mangling. ## Code After: import re import os from django.test import TestCase from django.utils.text import slugify from .utils import get_template_dirs re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"') class TestTemplatesMeta(type): def __new__(cls, name, bases, attrs): def generate(template): def fn(self): self.assertValidURLs(template) return fn for x in get_template_dirs(): for root, _, templates in os.walk(x): for y in templates: template = os.path.join(root, y) attrs['test_%s' % slugify(template)] = generate(template) return super(TestTemplatesMeta, cls).__new__(cls, name, bases, attrs) class TestTemplates(TestCase): __metaclass__ = TestTemplatesMeta def assertValidURLs(self, template): with open(template) as f: urls = [m.group('url') for m in re_url.finditer(f.read())] self.failIf(urls, "%s contains hardcoded URLs: %r" % ( template, urls, ))
import re import os from django.test import TestCase + from django.utils.text import slugify from .utils import get_template_dirs re_url = re.compile(r'\shref="(?P<url>(?!https?:|mailto:|\?|{|#)[^"]*)"') + class TestTemplatesMeta(type): + def __new__(cls, name, bases, attrs): + def generate(template): + def fn(self): + self.assertValidURLs(template) + return fn + + for x in get_template_dirs(): + for root, _, templates in os.walk(x): + for y in templates: + template = os.path.join(root, y) + + attrs['test_%s' % slugify(template)] = generate(template) + + return super(TestTemplatesMeta, cls).__new__(cls, name, bases, attrs) + class TestTemplates(TestCase): + __metaclass__ = TestTemplatesMeta + - def assertValidURLs(self, filename): ? ^^ -- ^ + def assertValidURLs(self, template): ? ^^^^ ^ - with open(filename) as f: ? ^^ -- ^ + with open(template) as f: ? ^^^^ ^ urls = [m.group('url') for m in re_url.finditer(f.read())] self.failIf(urls, "%s contains hardcoded URLs: %r" % ( - filename, ? ^^ -- ^ + template, ? ^^^^ ^ urls, )) - - idx = 0 - for x in get_template_dirs(): - for root, _, filenames in os.walk(x): - for y in filenames: - def wrapper(self, filename=os.path.join(root, y)): - self.assertValidURLs(filename) - idx += 1 - locals()['test_template_idx_%04d' % idx] = wrapper
1e775fbc8e11f44b8a680e17ac35e735e52d5739
fabfile.py
fabfile.py
from fabric.api import run, env
from fabric.context_managers import cd
import os

env.hosts = ['[email protected]:1337']


def update_podcasts():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        run('python3 manage.py updatepodcasts')


def setup_dev():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        run('python3 manage.py syncdb')
        run('python3 manage.py loaddata sample_podcasts')
        run('python3 manage.py updatepodcasts')
        run('python3 manage.py fetchepisodes')
        run('python3 manage.py update_index')


def rebuild_index():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        # Add --noinput flag because of this issue:
        # https://github.com/toastdriven/django-haystack/issues/902
        run('python3 manage.py rebuild_index --noinput')
from fabric.api import run, env
from fabric.context_managers import cd
import os

env.hosts = ['[email protected]:1337']


def update_podcasts():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        run('python3 manage.py updatepodcasts')


def fetch_episodes():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        run('python3 manage.py fetchepisodes')


def setup_dev():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        run('python3 manage.py syncdb')
        run('python3 manage.py loaddata sample_podcasts')
        run('python3 manage.py updatepodcasts')
        run('python3 manage.py fetchepisodes')
        run('python3 manage.py update_index')


def rebuild_index():
    with cd('"{}"'.format(os.path.dirname(__file__))):
        # Add --noinput flag because of this issue:
        # https://github.com/toastdriven/django-haystack/issues/902
        run('python3 manage.py rebuild_index --noinput')
Add fab command for fetching episodes
Add fab command for fetching episodes
Python
mit
matachi/sputnik,matachi/sputnik,matachi/sputnik,matachi/sputnik
from fabric.api import run, env from fabric.context_managers import cd import os env.hosts = ['[email protected]:1337'] def update_podcasts(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py updatepodcasts') + + + def fetch_episodes(): + with cd('"{}"'.format(os.path.dirname(__file__))): + run('python3 manage.py fetchepisodes') def setup_dev(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py syncdb') run('python3 manage.py loaddata sample_podcasts') run('python3 manage.py updatepodcasts') run('python3 manage.py fetchepisodes') run('python3 manage.py update_index') def rebuild_index(): with cd('"{}"'.format(os.path.dirname(__file__))): # Add --noinput flag because of this issue: # https://github.com/toastdriven/django-haystack/issues/902 run('python3 manage.py rebuild_index --noinput')
Add fab command for fetching episodes
## Code Before: from fabric.api import run, env from fabric.context_managers import cd import os env.hosts = ['[email protected]:1337'] def update_podcasts(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py updatepodcasts') def setup_dev(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py syncdb') run('python3 manage.py loaddata sample_podcasts') run('python3 manage.py updatepodcasts') run('python3 manage.py fetchepisodes') run('python3 manage.py update_index') def rebuild_index(): with cd('"{}"'.format(os.path.dirname(__file__))): # Add --noinput flag because of this issue: # https://github.com/toastdriven/django-haystack/issues/902 run('python3 manage.py rebuild_index --noinput') ## Instruction: Add fab command for fetching episodes ## Code After: from fabric.api import run, env from fabric.context_managers import cd import os env.hosts = ['[email protected]:1337'] def update_podcasts(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py updatepodcasts') def fetch_episodes(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py fetchepisodes') def setup_dev(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py syncdb') run('python3 manage.py loaddata sample_podcasts') run('python3 manage.py updatepodcasts') run('python3 manage.py fetchepisodes') run('python3 manage.py update_index') def rebuild_index(): with cd('"{}"'.format(os.path.dirname(__file__))): # Add --noinput flag because of this issue: # https://github.com/toastdriven/django-haystack/issues/902 run('python3 manage.py rebuild_index --noinput')
from fabric.api import run, env from fabric.context_managers import cd import os env.hosts = ['[email protected]:1337'] def update_podcasts(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py updatepodcasts') + + + def fetch_episodes(): + with cd('"{}"'.format(os.path.dirname(__file__))): + run('python3 manage.py fetchepisodes') def setup_dev(): with cd('"{}"'.format(os.path.dirname(__file__))): run('python3 manage.py syncdb') run('python3 manage.py loaddata sample_podcasts') run('python3 manage.py updatepodcasts') run('python3 manage.py fetchepisodes') run('python3 manage.py update_index') def rebuild_index(): with cd('"{}"'.format(os.path.dirname(__file__))): # Add --noinput flag because of this issue: # https://github.com/toastdriven/django-haystack/issues/902 run('python3 manage.py rebuild_index --noinput')
61465e1df2f43d2d82b40ddb15c17bee4ddcccda
src/poliastro/ephem.py
src/poliastro/ephem.py
import numpy as np
from scipy.interpolate import interp1d

from astropy import units as u
from astropy.time import Time

from poliastro.bodies import Moon
from poliastro.twobody.orbit import Orbit
from poliastro.coordinates import transform
from astropy.coordinates import ICRS, GCRS


def build_ephem_interpolant(body, period, t_span, rtol=1e-5):
    h = (period * rtol).to(u.day).value
    t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01))
    t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h))
    r_values = np.zeros((t_values.shape[0], 3))

    for i, t in enumerate(t_values):
        epoch = Time(t, format='jd', scale='tdb')

        body_t = Orbit.from_body_ephem(body, epoch)
        if body != Moon:
            body_t = transform(body_t, ICRS, GCRS)
        r_values[i] = body_t.r

    t_values = ((t_values - t_span[0]) * u.day).to(u.s).value
    return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True)
import numpy as np
from scipy.interpolate import interp1d

from astropy import units as u
from astropy.time import Time
from astropy.coordinates import get_body_barycentric, ICRS, GCRS, CartesianRepresentation


def build_ephem_interpolant(body, period, t_span, rtol=1e-5):
    h = (period * rtol).to(u.day).value
    t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01))
    t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h))
    r_values = np.zeros((t_values.shape[0], 3))

    for i, t in enumerate(t_values):
        epoch = Time(t, format='jd', scale='tdb')

        r = get_body_barycentric(body.name, epoch)
        r = (ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation)
             .transform_to(GCRS(obstime=epoch))
             .represent_as(CartesianRepresentation)
             )

        r_values[i] = r.xyz.to(u.km)

    t_values = ((t_values - t_span[0]) * u.day).to(u.s).value
    return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True)
Fix 3rd body tests for Moon, simplify interpolant code
Fix 3rd body tests for Moon, simplify interpolant code
Python
mit
Juanlu001/poliastro,Juanlu001/poliastro,newlawrence/poliastro,Juanlu001/poliastro,newlawrence/poliastro,poliastro/poliastro,newlawrence/poliastro
import numpy as np from scipy.interpolate import interp1d from astropy import units as u from astropy.time import Time + from astropy.coordinates import get_body_barycentric, ICRS, GCRS, CartesianRepresentation - - from poliastro.bodies import Moon - from poliastro.twobody.orbit import Orbit - from poliastro.coordinates import transform - from astropy.coordinates import ICRS, GCRS def build_ephem_interpolant(body, period, t_span, rtol=1e-5): h = (period * rtol).to(u.day).value t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)) t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h)) r_values = np.zeros((t_values.shape[0], 3)) for i, t in enumerate(t_values): epoch = Time(t, format='jd', scale='tdb') - body_t = Orbit.from_body_ephem(body, epoch) - if body != Moon: - body_t = transform(body_t, ICRS, GCRS) - r_values[i] = body_t.r + + r = get_body_barycentric(body.name, epoch) + r = (ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation) + .transform_to(GCRS(obstime=epoch)) + .represent_as(CartesianRepresentation) + ) + + r_values[i] = r.xyz.to(u.km) t_values = ((t_values - t_span[0]) * u.day).to(u.s).value return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True)
Fix 3rd body tests for Moon, simplify interpolant code
## Code Before: import numpy as np from scipy.interpolate import interp1d from astropy import units as u from astropy.time import Time from poliastro.bodies import Moon from poliastro.twobody.orbit import Orbit from poliastro.coordinates import transform from astropy.coordinates import ICRS, GCRS def build_ephem_interpolant(body, period, t_span, rtol=1e-5): h = (period * rtol).to(u.day).value t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)) t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h)) r_values = np.zeros((t_values.shape[0], 3)) for i, t in enumerate(t_values): epoch = Time(t, format='jd', scale='tdb') body_t = Orbit.from_body_ephem(body, epoch) if body != Moon: body_t = transform(body_t, ICRS, GCRS) r_values[i] = body_t.r t_values = ((t_values - t_span[0]) * u.day).to(u.s).value return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True) ## Instruction: Fix 3rd body tests for Moon, simplify interpolant code ## Code After: import numpy as np from scipy.interpolate import interp1d from astropy import units as u from astropy.time import Time from astropy.coordinates import get_body_barycentric, ICRS, GCRS, CartesianRepresentation def build_ephem_interpolant(body, period, t_span, rtol=1e-5): h = (period * rtol).to(u.day).value t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)) t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h)) r_values = np.zeros((t_values.shape[0], 3)) for i, t in enumerate(t_values): epoch = Time(t, format='jd', scale='tdb') r = get_body_barycentric(body.name, epoch) r = (ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation) .transform_to(GCRS(obstime=epoch)) .represent_as(CartesianRepresentation) ) r_values[i] = r.xyz.to(u.km) t_values = ((t_values - t_span[0]) * u.day).to(u.s).value return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True)
import numpy as np from scipy.interpolate import interp1d from astropy import units as u from astropy.time import Time + from astropy.coordinates import get_body_barycentric, ICRS, GCRS, CartesianRepresentation - - from poliastro.bodies import Moon - from poliastro.twobody.orbit import Orbit - from poliastro.coordinates import transform - from astropy.coordinates import ICRS, GCRS def build_ephem_interpolant(body, period, t_span, rtol=1e-5): h = (period * rtol).to(u.day).value t_span = ((t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)) t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h)) r_values = np.zeros((t_values.shape[0], 3)) for i, t in enumerate(t_values): epoch = Time(t, format='jd', scale='tdb') - body_t = Orbit.from_body_ephem(body, epoch) - if body != Moon: - body_t = transform(body_t, ICRS, GCRS) - r_values[i] = body_t.r + + r = get_body_barycentric(body.name, epoch) + r = (ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation) + .transform_to(GCRS(obstime=epoch)) + .represent_as(CartesianRepresentation) + ) + + r_values[i] = r.xyz.to(u.km) t_values = ((t_values - t_span[0]) * u.day).to(u.s).value return interp1d(t_values, r_values, kind='cubic', axis=0, assume_sorted=True)
284d750d7da25b1d3db17ca4d5931e1b6d1d7319
tests/browser/test_editor.py
tests/browser/test_editor.py
from fancypages.test.testcases import SplinterTestCase


class TestEditingFancyPage(SplinterTestCase):
    is_staff = True
    is_logged_in = True

    def test_moving_a_block(self):
        pass
from django.core.urlresolvers import reverse

from fancypages.test.testcases import SplinterTestCase


class TestTheEditorPanel(SplinterTestCase):
    is_staff = True
    is_logged_in = True

    def _get_cookie_names(self):
        return [c.get('name') for c in self.browser.cookies.all()]

    def test_can_be_opened_by_clicking_the_handle(self):
        self.goto(reverse('home'))
        body_tag = self.browser.find_by_css('body').first
        self.assertTrue(body_tag.has_class('editor-hidden'))

        self.browser.find_by_css('#editor-handle').click()
        self.assertFalse(body_tag.has_class('editor-hidden'))
        self.assertIn('fpEditorOpened', self._get_cookie_names())

    def test_can_be_closed_by_clicking_the_x(self):
        self.goto(reverse('home'))
        self.browser.find_by_css('#editor-handle').click()
        body_tag = self.browser.find_by_css('body').first
        self.assertFalse(body_tag.has_class('editor-hidden'))

        self.browser.find_by_css('#editor-close').click()
        body_tag = self.browser.find_by_css('body').first
        self.assertTrue(body_tag.has_class('editor-hidden'))
        self.assertNotIn('fpEditorOpened', self._get_cookie_names())

    def test_remains_opened_when_reloading_the_page(self):
        self.goto(reverse('home'))
        self.browser.find_by_css('#editor-handle').click()
        body_tag = self.browser.find_by_css('body').first
        self.assertFalse(body_tag.has_class('editor-hidden'))

        self.goto(reverse('home'))
        body_tag = self.browser.find_by_css('body').first
        self.assertFalse(body_tag.has_class('editor-hidden'))
Add tests for editor panel JS
Add tests for editor panel JS
Python
bsd-3-clause
tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages
+ from django.core.urlresolvers import reverse + from fancypages.test.testcases import SplinterTestCase - class TestEditingFancyPage(SplinterTestCase): + class TestTheEditorPanel(SplinterTestCase): is_staff = True is_logged_in = True - def test_moving_a_block(self): - pass + def _get_cookie_names(self): + return [c.get('name') for c in self.browser.cookies.all()] + def test_can_be_opened_by_clicking_the_handle(self): + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-handle').click() + self.assertFalse(body_tag.has_class('editor-hidden')) + self.assertIn('fpEditorOpened', self._get_cookie_names()) + + def test_can_be_closed_by_clicking_the_x(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-close').click() + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + self.assertNotIn('fpEditorOpened', self._get_cookie_names()) + + def test_remains_opened_when_reloading_the_page(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) +
Add tests for editor panel JS
## Code Before: from fancypages.test.testcases import SplinterTestCase class TestEditingFancyPage(SplinterTestCase): is_staff = True is_logged_in = True def test_moving_a_block(self): pass ## Instruction: Add tests for editor panel JS ## Code After: from django.core.urlresolvers import reverse from fancypages.test.testcases import SplinterTestCase class TestTheEditorPanel(SplinterTestCase): is_staff = True is_logged_in = True def _get_cookie_names(self): return [c.get('name') for c in self.browser.cookies.all()] def test_can_be_opened_by_clicking_the_handle(self): self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-handle').click() self.assertFalse(body_tag.has_class('editor-hidden')) self.assertIn('fpEditorOpened', self._get_cookie_names()) def test_can_be_closed_by_clicking_the_x(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-close').click() body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.assertNotIn('fpEditorOpened', self._get_cookie_names()) def test_remains_opened_when_reloading_the_page(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden'))
+ from django.core.urlresolvers import reverse + from fancypages.test.testcases import SplinterTestCase - class TestEditingFancyPage(SplinterTestCase): ? ^^^^ ----- + class TestTheEditorPanel(SplinterTestCase): ? +++ ^^^ + is_staff = True is_logged_in = True - def test_moving_a_block(self): - pass + def _get_cookie_names(self): + return [c.get('name') for c in self.browser.cookies.all()] + + def test_can_be_opened_by_clicking_the_handle(self): + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-handle').click() + self.assertFalse(body_tag.has_class('editor-hidden')) + self.assertIn('fpEditorOpened', self._get_cookie_names()) + + def test_can_be_closed_by_clicking_the_x(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-close').click() + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + self.assertNotIn('fpEditorOpened', self._get_cookie_names()) + + def test_remains_opened_when_reloading_the_page(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden'))
09f429e76a7b2cd49ea66b70d314bb4510971a5f
gui.py
gui.py
import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="") win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main()
import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="Text Playing Game") self.set_border_width(10) self.set_size_request(500, 400) win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main()
Set GUI title and size
Set GUI title and size
Python
mit
Giovanni21M/Text-Playing-Game
import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): - Gtk.Window.__init__(self, title="") + Gtk.Window.__init__(self, title="Text Playing Game") + self.set_border_width(10) + self.set_size_request(500, 400) win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main()
Set GUI title and size
## Code Before: import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="") win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main() ## Instruction: Set GUI title and size ## Code After: import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="Text Playing Game") self.set_border_width(10) self.set_size_request(500, 400) win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main()
import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): - Gtk.Window.__init__(self, title="") + Gtk.Window.__init__(self, title="Text Playing Game") ? +++++++++++++++++ + self.set_border_width(10) + self.set_size_request(500, 400) win = MainWindow() win.connect("delete-event", Gtk.main_quit) win.show_all() Gtk.main()
63814839642e593e35f8afaf68fc6724b69075b5
trade_server.py
trade_server.py
import json import threading import socket import SocketServer from orderbook import match_bid, offers, asks messages = [] class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: data = json.loads(data) messages.append(data) print "MESSAGES: {}".format(messages) if data['type'] == 'bid': response = handle_bid(data) elif data['type'] == 'ask': response = handle_asks(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server def handle_ask(ask): asks.append(ask) def handle_bid(bid): bids.append(bid)
import json import threading import socket import SocketServer from orderbook import asks, bids class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: response = handle_data(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server def handle_data(data): data = json.loads(data) if data['type'] == 'ask': handle_ask(data) elif data['type'] == 'bid': handle_bid(data) elif data['type'] == 'greeting': handle_greeting(data) def handle_ask(ask): asks.append(ask) def handle_bid(bid): bids.append(bid) def handle_greeting(greeting): pass
Add stubs for handling requests to server.
Add stubs for handling requests to server.
Python
mit
Tribler/decentral-market
import json import threading import socket import SocketServer + from orderbook import asks, bids - from orderbook import match_bid, offers, asks - - messages = [] class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: - data = json.loads(data) - messages.append(data) - print "MESSAGES: {}".format(messages) - if data['type'] == 'bid': - response = handle_bid(data) + response = handle_data(data) - elif data['type'] == 'ask': - response = handle_asks(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server + def handle_data(data): + data = json.loads(data) + if data['type'] == 'ask': + handle_ask(data) + elif data['type'] == 'bid': + handle_bid(data) + elif data['type'] == 'greeting': + handle_greeting(data) + + def handle_ask(ask): asks.append(ask) + def handle_bid(bid): bids.append(bid) + + def handle_greeting(greeting): + pass +
Add stubs for handling requests to server.
## Code Before: import json import threading import socket import SocketServer from orderbook import match_bid, offers, asks messages = [] class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: data = json.loads(data) messages.append(data) print "MESSAGES: {}".format(messages) if data['type'] == 'bid': response = handle_bid(data) elif data['type'] == 'ask': response = handle_asks(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server def handle_ask(ask): asks.append(ask) def handle_bid(bid): bids.append(bid) ## Instruction: Add stubs for handling requests to server. ## Code After: import json import threading import socket import SocketServer from orderbook import asks, bids class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: response = handle_data(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server def handle_data(data): data = json.loads(data) if data['type'] == 'ask': handle_ask(data) elif data['type'] == 'bid': handle_bid(data) elif data['type'] == 'greeting': handle_greeting(data) def handle_ask(ask): asks.append(ask) def handle_bid(bid): bids.append(bid) def handle_greeting(greeting): pass
import json import threading import socket import SocketServer + from orderbook import asks, bids - from orderbook import match_bid, offers, asks - - messages = [] class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): def handle(self): try: while True: data = self.request.recv(1024) if data: - data = json.loads(data) - messages.append(data) - print "MESSAGES: {}".format(messages) - if data['type'] == 'bid': - response = handle_bid(data) ? ---- -- + response = handle_data(data) ? +++ - elif data['type'] == 'ask': - response = handle_asks(data) cur_thread = threading.current_thread() response = "\n{}: {}".format(cur_thread.name, data) self.request.sendall(response) except socket.error: # Surpress errno 13 Broken Pipe pass class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass def create_server(host="localhost", port=0): server = ThreadedTCPServer((host, port), ThreadedTCPRequestHandler) server_thread = threading.Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() return server + def handle_data(data): + data = json.loads(data) + if data['type'] == 'ask': + handle_ask(data) + elif data['type'] == 'bid': + handle_bid(data) + elif data['type'] == 'greeting': + handle_greeting(data) + + def handle_ask(ask): asks.append(ask) + def handle_bid(bid): bids.append(bid) + + + def handle_greeting(greeting): + pass
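The handle_data added above is a small router that parses a JSON message and dispatches on its 'type' field. A standalone sketch of the same pattern with plain lists and made-up message payloads (no sockets involved):

# Sketch: dispatch JSON messages on their 'type' field, mirroring handle_data.
import json

asks, bids = [], []

def handle_data(raw):
    data = json.loads(raw)
    handlers = {
        'ask': asks.append,
        'bid': bids.append,
        'greeting': lambda message: None,   # acknowledged but ignored
    }
    handlers[data['type']](data)

handle_data(json.dumps({'type': 'ask', 'price': 10}))
handle_data(json.dumps({'type': 'bid', 'price': 9}))
print(len(asks), len(bids))                 # -> 1 1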
2161910a53604bdc48027c5c4e71f9af4228cbaa
keras/backend/common.py
keras/backend/common.py
import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) if isinstance(floatx, unicode): floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX)
import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX)
Fix floatx encoding on Python3
Fix floatx encoding on Python3
Python
apache-2.0
keras-team/keras,nebw/keras,daviddiazvico/keras,kemaswill/keras,DeepGnosis/keras,keras-team/keras,dolaameng/keras,relh/keras,kuza55/keras
import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) - if isinstance(floatx, unicode): - floatx = floatx.encode('ascii') + floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX)
Fix floatx encoding on Python3
## Code Before: import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) if isinstance(floatx, unicode): floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX) ## Instruction: Fix floatx encoding on Python3 ## Code After: import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX)
import numpy as np # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 def epsilon(): return _EPSILON def set_epsilon(e): global _EPSILON _EPSILON = e def floatx(): return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) - if isinstance(floatx, unicode): - floatx = floatx.encode('ascii') ? ---- + floatx = floatx.encode('ascii') _FLOATX = floatx def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX)
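The fix above drops the isinstance(floatx, unicode) guard because the unicode type only exists on Python 2; on Python 3 the string is already text and .encode('ascii') simply yields bytes. A short illustration of that Python 3 behaviour:

# Sketch: on Python 3 there is no separate unicode type, and encoding a str
# produces bytes -- the behaviour behind the simplified set_floatx above.
floatx = 'float32'
encoded = floatx.encode('ascii')
print(type(floatx).__name__, type(encoded).__name__)   # -> str bytes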
7a732c70fb5e07181aeb8f2386230fbecf0667e9
test/test_historynode.py
test/test_historynode.py
""" Tests for the HistoryNode module """ pass
""" Tests for the HistoryNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import historynode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestHistoryNode(unittest.TestCase): """ Tests for the historynode module, containing the HistoryNode class """ def test_print_board(self): """Check that print_board works""" with captured_output() as out: hn_obj = historynode.HistoryNode() hn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("Player 1: None\n" "Player 2: None\n" "Result: None\n" "Game Type: None\n" "Fox Search: None\n" "Goose Search: None\n" "Half Move: None\n" " -1 -1 -1 \n" " -1 -1 -1 \n" "-1 -1 -1 -1 -1 -1 -1\n" "-1 -1 -1 -1 -1 -1 -1\n" "-1 -1 -1 -1 -1 -1 -1\n" " -1 -1 -1 \n" " -1 -1 -1") self.assertEqual(actual_print, expected_print)
Add unit test for print_board()
Add unit test for print_board()
Python
mit
blairck/jaeger
""" Tests for the HistoryNode module """ - pass + from contextlib import contextmanager + from io import StringIO + import sys + import unittest + from src import historynode + + @contextmanager + def captured_output(): + """ Redirects stdout to StringIO so we can inspect Print statements """ + new_out = StringIO() + old_out = sys.stdout + try: + sys.stdout = new_out + yield sys.stdout + finally: + sys.stdout = old_out + + class TestHistoryNode(unittest.TestCase): + """ Tests for the historynode module, containing the HistoryNode class """ + + def test_print_board(self): + """Check that print_board works""" + with captured_output() as out: + hn_obj = historynode.HistoryNode() + hn_obj.print_board() + actual_print = out.getvalue().strip() + expected_print = ("Player 1: None\n" + "Player 2: None\n" + "Result: None\n" + "Game Type: None\n" + "Fox Search: None\n" + "Goose Search: None\n" + "Half Move: None\n" + " -1 -1 -1 \n" + " -1 -1 -1 \n" + "-1 -1 -1 -1 -1 -1 -1\n" + "-1 -1 -1 -1 -1 -1 -1\n" + "-1 -1 -1 -1 -1 -1 -1\n" + " -1 -1 -1 \n" + " -1 -1 -1") + self.assertEqual(actual_print, expected_print) +
Add unit test for print_board()
## Code Before: """ Tests for the HistoryNode module """ pass ## Instruction: Add unit test for print_board() ## Code After: """ Tests for the HistoryNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import historynode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestHistoryNode(unittest.TestCase): """ Tests for the historynode module, containing the HistoryNode class """ def test_print_board(self): """Check that print_board works""" with captured_output() as out: hn_obj = historynode.HistoryNode() hn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("Player 1: None\n" "Player 2: None\n" "Result: None\n" "Game Type: None\n" "Fox Search: None\n" "Goose Search: None\n" "Half Move: None\n" " -1 -1 -1 \n" " -1 -1 -1 \n" "-1 -1 -1 -1 -1 -1 -1\n" "-1 -1 -1 -1 -1 -1 -1\n" "-1 -1 -1 -1 -1 -1 -1\n" " -1 -1 -1 \n" " -1 -1 -1") self.assertEqual(actual_print, expected_print)
""" Tests for the HistoryNode module """ - pass + from contextlib import contextmanager + from io import StringIO + import sys + import unittest + + from src import historynode + + @contextmanager + def captured_output(): + """ Redirects stdout to StringIO so we can inspect Print statements """ + new_out = StringIO() + old_out = sys.stdout + try: + sys.stdout = new_out + yield sys.stdout + finally: + sys.stdout = old_out + + class TestHistoryNode(unittest.TestCase): + """ Tests for the historynode module, containing the HistoryNode class """ + + def test_print_board(self): + """Check that print_board works""" + with captured_output() as out: + hn_obj = historynode.HistoryNode() + hn_obj.print_board() + actual_print = out.getvalue().strip() + expected_print = ("Player 1: None\n" + "Player 2: None\n" + "Result: None\n" + "Game Type: None\n" + "Fox Search: None\n" + "Goose Search: None\n" + "Half Move: None\n" + " -1 -1 -1 \n" + " -1 -1 -1 \n" + "-1 -1 -1 -1 -1 -1 -1\n" + "-1 -1 -1 -1 -1 -1 -1\n" + "-1 -1 -1 -1 -1 -1 -1\n" + " -1 -1 -1 \n" + " -1 -1 -1") + self.assertEqual(actual_print, expected_print)
450e9415f90c92d64f814c363248db8250c5a8f2
rest_framework_json_api/mixins.py
rest_framework_json_api/mixins.py
class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset
class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params'): ids = dict(self.request.query_params).get('ids[]') else: ids = dict(self.request.QUERY_PARAMS).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2`
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2`
Python
bsd-2-clause
grapo/django-rest-framework-json-api,aquavitae/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,kaldras/django-rest-framework-json-api,hnakamur/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,leifurhauks/django-rest-framework-json-api,django-json-api/rest_framework_ember,pombredanne/django-rest-framework-json-api,leo-naeka/rest_framework_ember,schtibe/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api
class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ - ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]') + if hasattr(self.request, 'query_params'): + ids = dict(self.request.query_params).get('ids[]') + else: + ids = dict(self.request.QUERY_PARAMS).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2`
## Code Before: class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset ## Instruction: Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2` ## Code After: class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params'): ids = dict(self.request.query_params).get('ids[]') else: ids = dict(self.request.QUERY_PARAMS).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset
class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ - ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]') + if hasattr(self.request, 'query_params'): + ids = dict(self.request.query_params).get('ids[]') + else: + ids = dict(self.request.QUERY_PARAMS).get('ids[]') if ids: self.queryset = self.queryset.filter(id__in=ids) return self.queryset
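The fix above is a compatibility shim: DRF 3.x exposes request.query_params while older releases only had request.QUERY_PARAMS, so the code probes with hasattr before reading. The shape of that shim with a stand-in request object (the attribute names come from the record; the object itself is fabricated for the example):

# Sketch: prefer the new attribute, fall back to the deprecated one.
class FakeRequest(object):                 # stand-in for a DRF request
    QUERY_PARAMS = {'ids[]': ['1', '2']}

def get_ids(request):
    if hasattr(request, 'query_params'):
        params = request.query_params
    else:
        params = request.QUERY_PARAMS
    return dict(params).get('ids[]')

print(get_ids(FakeRequest()))              # -> ['1', '2']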
828be5ee4640ddd9ee595b4ba15fa973ccbcb82f
account_fiscal_position_no_source_tax/account.py
account_fiscal_position_no_source_tax/account.py
from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False)
from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' @api.v7 def map_tax(self, cr, uid, fposition_id, taxes, context=None): result = super(account_fiscal_position, self).map_tax( cr, uid, fposition_id, taxes, context=context) taxes_without_src_ids = [ x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id] result = set(result) | set(taxes_without_src_ids) return list(result) @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False)
FIX fiscal position no source tax on v7 api
FIX fiscal position no source tax on v7 api
Python
agpl-3.0
ingadhoc/partner,ingadhoc/odoo-addons,maljac/odoo-addons,bmya/odoo-addons,levkar/odoo-addons,ingadhoc/odoo-addons,ingadhoc/sale,levkar/odoo-addons,ingadhoc/account-financial-tools,sysadminmatmoz/ingadhoc,ClearCorp/account-financial-tools,HBEE/odoo-addons,jorsea/odoo-addons,sysadminmatmoz/ingadhoc,adhoc-dev/odoo-addons,jorsea/odoo-addons,ingadhoc/sale,adhoc-dev/odoo-addons,dvitme/odoo-addons,bmya/odoo-addons,ingadhoc/odoo-addons,adhoc-dev/account-financial-tools,jorsea/odoo-addons,dvitme/odoo-addons,ingadhoc/account-payment,ClearCorp/account-financial-tools,syci/ingadhoc-odoo-addons,levkar/odoo-addons,ingadhoc/account-invoicing,HBEE/odoo-addons,maljac/odoo-addons,maljac/odoo-addons,adhoc-dev/odoo-addons,bmya/odoo-addons,syci/ingadhoc-odoo-addons,ingadhoc/product,ingadhoc/product,sysadminmatmoz/ingadhoc,ingadhoc/sale,ingadhoc/sale,dvitme/odoo-addons,ingadhoc/stock,ingadhoc/account-analytic,syci/ingadhoc-odoo-addons,levkar/odoo-addons,adhoc-dev/account-financial-tools,HBEE/odoo-addons
from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' + + @api.v7 + def map_tax(self, cr, uid, fposition_id, taxes, context=None): + result = super(account_fiscal_position, self).map_tax( + cr, uid, fposition_id, taxes, context=context) + taxes_without_src_ids = [ + x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id] + result = set(result) | set(taxes_without_src_ids) + return list(result) @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False)
FIX fiscal position no source tax on v7 api
## Code Before: from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False) ## Instruction: FIX fiscal position no source tax on v7 api ## Code After: from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' @api.v7 def map_tax(self, cr, uid, fposition_id, taxes, context=None): result = super(account_fiscal_position, self).map_tax( cr, uid, fposition_id, taxes, context=context) taxes_without_src_ids = [ x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id] result = set(result) | set(taxes_without_src_ids) return list(result) @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False)
from openerp import models, api, fields class account_fiscal_position(models.Model): _inherit = 'account.fiscal.position' + + @api.v7 + def map_tax(self, cr, uid, fposition_id, taxes, context=None): + result = super(account_fiscal_position, self).map_tax( + cr, uid, fposition_id, taxes, context=context) + taxes_without_src_ids = [ + x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id] + result = set(result) | set(taxes_without_src_ids) + return list(result) @api.v8 # noqa def map_tax(self, taxes): result = super(account_fiscal_position, self).map_tax(taxes) taxes_without_src_ids = [ x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id] result += result.browse(taxes_without_src_ids) return result class account_fiscal_position_tax(models.Model): _inherit = 'account.fiscal.position.tax' tax_src_id = fields.Many2one(required=False)
d6940b3ff80190f87bf7d5336b9c54dc160da12a
helpers/team_manipulator.py
helpers/team_manipulator.py
from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
Remove references to NdbManipulatorBase, which never really happened.
Remove references to NdbManipulatorBase, which never really happened.
Python
mit
1fish2/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance
- from helpers.ndb_manipulator_base import NdbManipulatorBase + from helpers.manipulator_base import ManipulatorBase - class TeamManipulator(NdbManipulatorBase): + class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
Remove references to NdbManipulatorBase, which never really happened.
## Code Before: from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team ## Instruction: Remove references to NdbManipulatorBase, which never really happened. ## Code After: from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
- from helpers.ndb_manipulator_base import NdbManipulatorBase ? ---- --- + from helpers.manipulator_base import ManipulatorBase - class TeamManipulator(NdbManipulatorBase): ? --- + class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
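updateMerge above copies only the non-null attributes of a freshly fetched Team onto the stored one and flags it dirty when anything changed. The same merge loop, framework-free, with plain objects and sample values invented for the example:

# Sketch: merge non-None attributes from `new` onto `old`, marking it dirty.
ATTRS = ('address', 'name', 'nickname', 'website')

class Team(object):
    def __init__(self, **kwargs):
        self.dirty = False
        for attr in ATTRS:
            setattr(self, attr, kwargs.get(attr))

def update_merge(new, old):
    for attr in ATTRS:
        value = getattr(new, attr)
        if value is not None and value != getattr(old, attr):
            setattr(old, attr, value)
            old.dirty = True
    return old

old = Team(name='Team 254')
new = Team(nickname='The Cheesy Poofs')
merged = update_merge(new, old)
print(merged.name, merged.nickname, merged.dirty)   # -> Team 254 The Cheesy Poofs True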
7d94abed2316c5ee6679f33d43c122b9bfcedab7
extra_countries/migrations/0001_initial.py
extra_countries/migrations/0001_initial.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), ('cities', '0002_auto_20151112_1857'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ]
Remove reference to nonexistent migration to fix tests
Remove reference to nonexistent migration to fix tests
Python
mit
openspending/cosmopolitan,kiote/cosmopolitan
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), - ('cities', '0002_auto_20151112_1857'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ]
Remove reference to nonexistent migration to fix tests
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), ('cities', '0002_auto_20151112_1857'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ] ## Instruction: Remove reference to nonexistent migration to fix tests ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('continents', '0001_initial'), ('currencies', '0001_initial'), - ('cities', '0002_auto_20151112_1857'), ] operations = [ migrations.CreateModel( name='ExtraCountry', fields=[ ('code', models.CharField(serialize=False, primary_key=True, max_length=3)), ('country', models.OneToOneField(to='cities.Country')), ('extra_continent', models.ForeignKey(to='continents.Continent', null=True)), ('extra_currency', models.ForeignKey(to='currencies.Currency', null=True)), ], ), ]
a20a63415bf1343ab826d1155c1004e84b14077e
massa/validation.py
massa/validation.py
from schematics.exceptions import ConversionError, ValidationError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value
from schematics.exceptions import ConversionError, ValidationError from .errors import InvalidInputError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value
Fix bug, InvalidInputError not defined.
Fix bug, InvalidInputError not defined.
Python
mit
jaapverloop/massa
from schematics.exceptions import ConversionError, ValidationError + from .errors import InvalidInputError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value
Fix bug, InvalidInputError not defined.
## Code Before: from schematics.exceptions import ConversionError, ValidationError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value ## Instruction: Fix bug, InvalidInputError not defined. ## Code After: from schematics.exceptions import ConversionError, ValidationError from .errors import InvalidInputError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value
from schematics.exceptions import ConversionError, ValidationError + from .errors import InvalidInputError def validate(schema, data): try: schema.import_data(data) schema.validate() except (ConversionError, ValidationError) as e: raise InvalidInputError(details=e.messages) def weight_validator(value): if abs(value.as_tuple().exponent) > 1: raise ValidationError('Only one decimal point is allowed.') return value
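Beyond the missing import, weight_validator above limits precision by inspecting Decimal.as_tuple().exponent. A quick standard-library illustration of what that exponent reports (the sample values are arbitrary):

# Sketch: as_tuple().exponent counts decimal places, the check used by
# weight_validator above to reject more than one decimal digit.
from decimal import Decimal

for text in ('12', '12.3', '12.34'):
    places = abs(Decimal(text).as_tuple().exponent)
    print(text, places, 'ok' if places <= 1 else 'rejected')
# -> 12 0 ok / 12.3 1 ok / 12.34 2 rejected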
43238d0de9e4d6d4909b4d67c17449a9599e5dac
mygpo/web/templatetags/time.py
mygpo/web/templatetags/time.py
from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) '0h 16m 40s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) '16m 40s' >>> format_duration(10009) '2h 46m 49s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) if hours: return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) else: return _('{m}m {s}s').format(m=minutes, s=seconds)
Format short durations without "0 hours"
Format short durations without "0 hours"
Python
agpl-3.0
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) - '0h 16m 40s' + '16m 40s' + >>> format_duration(10009) + '2h 46m 49s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) - return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) + if hours: + return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) + else: + return _('{m}m {s}s').format(m=minutes, s=seconds) +
Format short durations without "0 hours"
## Code Before: from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) '0h 16m 40s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) ## Instruction: Format short durations without "0 hours" ## Code After: from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) '16m 40s' >>> format_duration(10009) '2h 46m 49s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) if hours: return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) else: return _('{m}m {s}s').format(m=minutes, s=seconds)
from datetime import time from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django import template register = template.Library() @register.filter def sec_to_time(sec): """ Converts seconds to a time object >>> t = sec_to_time(1000) >>> (t.hour, t.minute, t.second) (0, 16, 40) """ s = int(sec) hour = int(s / 60 / 60) minute = int((s / 60) % 60) sec = int(s % 60 ) return time(hour, minute, sec) @register.filter @mark_safe def format_duration(sec): """ Converts seconds into a duration string >>> format_duration(1000) - '0h 16m 40s' ? --- + '16m 40s' + >>> format_duration(10009) + '2h 46m 49s' """ hours = int(sec / 60 / 60) minutes = int((sec / 60) % 60) seconds = int(sec % 60) + + if hours: - return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) + return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds) ? ++++ + else: + return _('{m}m {s}s').format(m=minutes, s=seconds)
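The change above makes format_duration drop the hour component when it is zero, verified through the doctests in the docstring. The same formatting rule written with divmod and without the Django translation wrapper:

# Sketch: hour-optional duration formatting, matching the doctests above.
def format_duration(sec):
    minutes, seconds = divmod(int(sec), 60)
    hours, minutes = divmod(minutes, 60)
    if hours:
        return '{h}h {m}m {s}s'.format(h=hours, m=minutes, s=seconds)
    return '{m}m {s}s'.format(m=minutes, s=seconds)

print(format_duration(1000))     # -> 16m 40s
print(format_duration(10009))    # -> 2h 46m 49s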
ee2d27eca45768a07a562405cf4431cb8d2b09bf
setup.py
setup.py
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Naveen Malik', maintainer_email='[email protected]', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','pycurl_benchmark','test_resttest'], license='Apache License, Version 2.0' )
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Sam Van Oort', maintainer_email='[email protected]', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','test_resttest'], license='Apache License, Version 2.0', requires=['argparse','yaml','pycurl'] )
Set maintainer and add dependencies to distutils config
Set maintainer and add dependencies to distutils config
Python
apache-2.0
sunyanhui/pyresttest,satish-suradkar/pyresttest,suvarnaraju/pyresttest,wirewit/pyresttest,netjunki/pyresttest,MorrisJobke/pyresttest,wirewit/pyresttest,suvarnaraju/pyresttest,svanoort/pyresttest,alazaro/pyresttest,sunyanhui/pyresttest,TimYi/pyresttest,MorrisJobke/pyresttest,holdenweb/pyresttest,TimYi/pyresttest,alazaro/pyresttest,janusnic/pyresttest,janusnic/pyresttest,holdenweb/pyresttest,netjunki/pyresttest,svanoort/pyresttest,satish-suradkar/pyresttest
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', - maintainer='Naveen Malik', + maintainer='Sam Van Oort', - maintainer_email='[email protected]', + maintainer_email='[email protected]', url='https://github.com/svanoort/pyresttest', - py_modules=['resttest','pycurl_benchmark','test_resttest'], + py_modules=['resttest','test_resttest'], - license='Apache License, Version 2.0' + license='Apache License, Version 2.0', + requires=['argparse','yaml','pycurl'] )
Set maintainer and add dependencies to distutils config
## Code Before: from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Naveen Malik', maintainer_email='[email protected]', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','pycurl_benchmark','test_resttest'], license='Apache License, Version 2.0' ) ## Instruction: Set maintainer and add dependencies to distutils config ## Code After: from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Sam Van Oort', maintainer_email='[email protected]', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','test_resttest'], license='Apache License, Version 2.0', requires=['argparse','yaml','pycurl'] )
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', - maintainer='Naveen Malik', + maintainer='Sam Van Oort', - maintainer_email='[email protected]', ? ^ ^^^ + maintainer_email='[email protected]', ? ^^ ^^^^^^ url='https://github.com/svanoort/pyresttest', - py_modules=['resttest','pycurl_benchmark','test_resttest'], ? ------------------- + py_modules=['resttest','test_resttest'], - license='Apache License, Version 2.0' + license='Apache License, Version 2.0', ? + + requires=['argparse','yaml','pycurl'] )
602d1ceb755d5d74312e965b5515bbe22c868fd4
sale_commission_pricelist/models/sale_order.py
sale_commission_pricelist/models/sale_order.py
from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): self.ensure_one() if self.product_id and self.order_id.pricelist_id: rule_id = self.order_id.pricelist_id.get_product_price_rule( product=self.product_id, quantity=self.product_uom_qty or 1.0, partner=self.order_id.partner_id, date=self.order_id.date_order, uom_id=self.product_uom.id)[1] rule = self.env['product.pricelist.item'].browse(rule_id) if rule.commission_id: self.agents.update({ 'commission': rule.commission_id.id, })
from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' def _get_commission_from_pricelist(self): self.ensure_one() if not self.product_id or not self.order_id.pricelist_id: return False rule_id = self.order_id.pricelist_id.get_product_price_rule( product=self.product_id, quantity=self.product_uom_qty or 1.0, partner=self.order_id.partner_id, date=self.order_id.date_order, uom_id=self.product_uom.id)[1] rule = self.env['product.pricelist.item'].browse(rule_id) return rule.commission_id @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): commission = self._get_commission_from_pricelist() if commission: self.agents.update({ 'commission': commission.id, }) def _prepare_agents_vals(self): self.ensure_one() res = super(SaleOrderLine, self)._prepare_agents_vals() commission = self._get_commission_from_pricelist() if commission: for vals in res: vals['commission'] = commission.id return res
Make this to work on button recompute
[FIX] sale_commission_pricelist: Make this to work on button recompute
Python
agpl-3.0
OCA/commission,OCA/commission
from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' + def _get_commission_from_pricelist(self): + self.ensure_one() + if not self.product_id or not self.order_id.pricelist_id: + return False + rule_id = self.order_id.pricelist_id.get_product_price_rule( + product=self.product_id, + quantity=self.product_uom_qty or 1.0, + partner=self.order_id.partner_id, + date=self.order_id.date_order, + uom_id=self.product_uom.id)[1] + rule = self.env['product.pricelist.item'].browse(rule_id) + return rule.commission_id + @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): + commission = self._get_commission_from_pricelist() + if commission: + self.agents.update({ + 'commission': commission.id, + }) + + def _prepare_agents_vals(self): self.ensure_one() + res = super(SaleOrderLine, self)._prepare_agents_vals() + commission = self._get_commission_from_pricelist() - if self.product_id and self.order_id.pricelist_id: - rule_id = self.order_id.pricelist_id.get_product_price_rule( - product=self.product_id, - quantity=self.product_uom_qty or 1.0, - partner=self.order_id.partner_id, - date=self.order_id.date_order, - uom_id=self.product_uom.id)[1] - rule = self.env['product.pricelist.item'].browse(rule_id) - if rule.commission_id: + if commission: - self.agents.update({ + for vals in res: - 'commission': rule.commission_id.id, + vals['commission'] = commission.id - }) + return res
Make this to work on button recompute
## Code Before: from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): self.ensure_one() if self.product_id and self.order_id.pricelist_id: rule_id = self.order_id.pricelist_id.get_product_price_rule( product=self.product_id, quantity=self.product_uom_qty or 1.0, partner=self.order_id.partner_id, date=self.order_id.date_order, uom_id=self.product_uom.id)[1] rule = self.env['product.pricelist.item'].browse(rule_id) if rule.commission_id: self.agents.update({ 'commission': rule.commission_id.id, }) ## Instruction: Make this to work on button recompute ## Code After: from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' def _get_commission_from_pricelist(self): self.ensure_one() if not self.product_id or not self.order_id.pricelist_id: return False rule_id = self.order_id.pricelist_id.get_product_price_rule( product=self.product_id, quantity=self.product_uom_qty or 1.0, partner=self.order_id.partner_id, date=self.order_id.date_order, uom_id=self.product_uom.id)[1] rule = self.env['product.pricelist.item'].browse(rule_id) return rule.commission_id @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): commission = self._get_commission_from_pricelist() if commission: self.agents.update({ 'commission': commission.id, }) def _prepare_agents_vals(self): self.ensure_one() res = super(SaleOrderLine, self)._prepare_agents_vals() commission = self._get_commission_from_pricelist() if commission: for vals in res: vals['commission'] = commission.id return res
from odoo import api, models class SaleOrderLine(models.Model): _inherit = 'sale.order.line' + def _get_commission_from_pricelist(self): + self.ensure_one() + if not self.product_id or not self.order_id.pricelist_id: + return False + rule_id = self.order_id.pricelist_id.get_product_price_rule( + product=self.product_id, + quantity=self.product_uom_qty or 1.0, + partner=self.order_id.partner_id, + date=self.order_id.date_order, + uom_id=self.product_uom.id)[1] + rule = self.env['product.pricelist.item'].browse(rule_id) + return rule.commission_id + @api.onchange('product_id', 'product_uom_qty') def _onchange_product_id_sale_commission_pricelist(self): + commission = self._get_commission_from_pricelist() + if commission: + self.agents.update({ + 'commission': commission.id, + }) + + def _prepare_agents_vals(self): self.ensure_one() + res = super(SaleOrderLine, self)._prepare_agents_vals() + commission = self._get_commission_from_pricelist() - if self.product_id and self.order_id.pricelist_id: - rule_id = self.order_id.pricelist_id.get_product_price_rule( - product=self.product_id, - quantity=self.product_uom_qty or 1.0, - partner=self.order_id.partner_id, - date=self.order_id.date_order, - uom_id=self.product_uom.id)[1] - rule = self.env['product.pricelist.item'].browse(rule_id) - if rule.commission_id: ? ---- ----- --- + if commission: - self.agents.update({ + for vals in res: - 'commission': rule.commission_id.id, ? ^^^^ ^ ^^^^^ --- - + vals['commission'] = commission.id ? ^^^^^ ^ ^^ - }) + return res
89f7678aa065d70d12d880ddaa7c22bbab2e84a8
scripts/install.py
scripts/install.py
import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True)
import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True) run("pip install jupyter", shell=True)
Install jupyter so people can follow along with notebooks
Install jupyter so people can follow along with notebooks
Python
mit
phanrahan/magmathon,phanrahan/magmathon
import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True) + run("pip install jupyter", shell=True)
Install jupyter so people can follow along with notebooks
## Code Before: import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True) ## Instruction: Install jupyter so people can follow along with notebooks ## Code After: import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True) run("pip install jupyter", shell=True)
import subprocess def run(command, *args, **kwargs): print("+ {}".format(command)) subprocess.run(command, *args, **kwargs) run("git submodule update --init", shell=True) run("pip install -e magma", shell=True) run("pip install -e mantle", shell=True) run("pip install -e loam", shell=True) run("pip install fabricate", shell=True) + run("pip install jupyter", shell=True)
cd1b68aaaefffc15ce10789445d7749c99deb3d4
shingen/generators/hosts.py
shingen/generators/hosts.py
from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name return co
from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name co.properties['notes'] = project_name # Used for auto deriving graphite path return co
Put project name in 'notes' field of host
Put project name in 'notes' field of host Labs' graphite metrics architecture means we need both the project name and the hostname to find a full path to our host. Abusing this field for that purpose. Change-Id: If097526f413f36407acdff852cc81216f9c84556
Python
apache-2.0
wikimedia/operations-software-shinkengen
from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name + co.properties['notes'] = project_name # Used for auto deriving graphite path return co
Put project name in 'notes' field of host
## Code Before: from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name return co ## Instruction: Put project name in 'notes' field of host ## Code After: from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name co.properties['notes'] = project_name # Used for auto deriving graphite path return co
from ..shinkenconfig import ConfigObject def generate_host_config(config, project_name, instance): co = ConfigObject('host') co.properties['use'] = 'generic-host' co.properties['host_name'] = instance['name'] co.properties['address'] = instance['ip'][0] projects = [project_name, config.get('default-hostgroup', 'labshost')] co.properties['hostgroups'] = ','.join(projects) co.properties['contact_groups'] = project_name + co.properties['notes'] = project_name # Used for auto deriving graphite path return co
9796e60975474006940af723a6cb8b16bc632ae0
tz_app/context_processors.py
tz_app/context_processors.py
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', pytz.utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
Fix a bug when pytz isn't installed.
Fix a bug when pytz isn't installed.
Python
bsd-3-clause
aaugustin/django-tz-demo
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): - alt_timezone = request.session.get('alt_timezone', pytz.utc) + alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
Fix a bug when pytz isn't installed.
## Code Before: from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', pytz.utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', } ## Instruction: Fix a bug when pytz isn't installed. ## Code After: from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): - alt_timezone = request.session.get('alt_timezone', pytz.utc) + alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc) ? + +++++++++++++ return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
a14a911ae49d8354f61426cee2925b2a24a9b521
Alerters/nc.py
Alerters/nc.py
try: import pync pync_available = True except ImportError: pync_available = False from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: self.alerter_logger.critical("Pync package is not available, cannot use NotificationCenterAlerter.") self.alerter_logger.critical("Try: pip install -r requirements.txt") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message))
try: import pync pync_available = True except ImportError: pync_available = False import platform from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: self.alerter_logger.critical("Pync package is not available, which is necessary to use NotificationCenterAlerter.") self.alerter_logger.critical("Try: pip install -r requirements.txt") return if platform.system() != "Darwin": self.alerter_logger.critical("This alerter (currently) only works on Mac OS X!") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message))
Add check for running on Mac OS X
Add check for running on Mac OS X
Python
bsd-3-clause
jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor
try: import pync pync_available = True except ImportError: pync_available = False + + import platform from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: - self.alerter_logger.critical("Pync package is not available, cannot use NotificationCenterAlerter.") + self.alerter_logger.critical("Pync package is not available, which is necessary to use NotificationCenterAlerter.") self.alerter_logger.critical("Try: pip install -r requirements.txt") + return + + if platform.system() != "Darwin": + self.alerter_logger.critical("This alerter (currently) only works on Mac OS X!") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message))
Add check for running on Mac OS X
## Code Before: try: import pync pync_available = True except ImportError: pync_available = False from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: self.alerter_logger.critical("Pync package is not available, cannot use NotificationCenterAlerter.") self.alerter_logger.critical("Try: pip install -r requirements.txt") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message)) ## Instruction: Add check for running on Mac OS X ## Code After: try: import pync pync_available = True except ImportError: pync_available = False import platform from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: self.alerter_logger.critical("Pync package is not available, which is necessary to use NotificationCenterAlerter.") self.alerter_logger.critical("Try: pip install -r requirements.txt") return if platform.system() != "Darwin": self.alerter_logger.critical("This alerter (currently) only works on Mac OS X!") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message))
try: import pync pync_available = True except ImportError: pync_available = False + + import platform from .alerter import Alerter class NotificationCenterAlerter(Alerter): """Send alerts to the Mac OS X Notification Center.""" def __init__(self, config_options): Alerter.__init__(self, config_options) if not pync_available: - self.alerter_logger.critical("Pync package is not available, cannot use NotificationCenterAlerter.") ? ^^ - + self.alerter_logger.critical("Pync package is not available, which is necessary to use NotificationCenterAlerter.") ? +++ +++++++++++ ^^^^ self.alerter_logger.critical("Try: pip install -r requirements.txt") + return + + if platform.system() != "Darwin": + self.alerter_logger.critical("This alerter (currently) only works on Mac OS X!") return def send_alert(self, name, monitor): """Send the message.""" alert_type = self.should_alert(monitor) message = "" if alert_type == "": return elif alert_type == "failure": message = "Monitor {} failed!".format(name) elif alert_type == "success": message = "Monitor {} succeeded.".format(name) else: self.alerter_logger.error("Unknown alert type: {}".format(alert_type)) return if not self.dry_run: pync.notify(message=message, title="SimpleMonitor") else: self.alerter_logger.info("dry_run: would send message: {}".format(message))
b6e9215457eba813f91c9eb4a8b96f8652bcd5fc
functional_tests/pages/settings.py
functional_tests/pages/settings.py
from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): return_link = PageElement(css='.mui--text-title a.appbar-correct') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem)
from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): return_link = PageElement(css='#sidebar-brand a') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem)
Make the return link work again
Make the return link work again
Python
mit
XeryusTC/projman,XeryusTC/projman,XeryusTC/projman
from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): - return_link = PageElement(css='.mui--text-title a.appbar-correct') + return_link = PageElement(css='#sidebar-brand a') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem)
Make the return link work again
## Code Before: from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): return_link = PageElement(css='.mui--text-title a.appbar-correct') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem) ## Instruction: Make the return link work again ## Code After: from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): return_link = PageElement(css='#sidebar-brand a') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem)
from selenium.webdriver.support.ui import Select from page_objects import PageObject, PageElement, MultiPageElement class SettingsPage(PageObject): - return_link = PageElement(css='.mui--text-title a.appbar-correct') + return_link = PageElement(css='#sidebar-brand a') inlist_delete_confirm = PageElement(name='inlist_delete_confirm') action_delete_confirm = PageElement(name='action_delete_confirm') confirm = PageElement(name='confirm') content = PageElement(id_='content') sidebar = PageElement(id_='sidebar') sidebar_return_link = PageElement(css='#sidebar a#return') _settings_list = MultiPageElement(tag_name='label') @property def settings_list(self): return [setting.text for setting in self._settings_list] _language_elem = PageElement(name='language') @property def language(self): return Select(self._language_elem)
6690479e46c9138c6f57ce9415b0429175545e96
stock_transfer_restrict_lot/models/stock_production_lot.py
stock_transfer_restrict_lot/models/stock_production_lot.py
from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0) result.append((rec.id, name)) return result
from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: read = rec.quant_ids.read_group( [('lot_id', '=', rec.id)], ['location_id', 'qty'], 'location_id') locations_qty = ', '.join( ['%s: %s' % (x['location_id'][1], x['qty']) for x in read]) name = '%s (%s)' % (rec.name, locations_qty) result.append((rec.id, name)) return result
FIX in lot name_get to show location with the stock
FIX in lot name_get to show location with the stock
Python
agpl-3.0
ingadhoc/stock
from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: - name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0) + read = rec.quant_ids.read_group( + [('lot_id', '=', rec.id)], + ['location_id', 'qty'], 'location_id') + locations_qty = ', '.join( + ['%s: %s' % (x['location_id'][1], x['qty']) for x in read]) + name = '%s (%s)' % (rec.name, locations_qty) result.append((rec.id, name)) return result
FIX in lot name_get to show location with the stock
## Code Before: from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0) result.append((rec.id, name)) return result ## Instruction: FIX in lot name_get to show location with the stock ## Code After: from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: read = rec.quant_ids.read_group( [('lot_id', '=', rec.id)], ['location_id', 'qty'], 'location_id') locations_qty = ', '.join( ['%s: %s' % (x['location_id'][1], x['qty']) for x in read]) name = '%s (%s)' % (rec.name, locations_qty) result.append((rec.id, name)) return result
from openerp import models, fields, api class StockProductionlot(models.Model): _inherit = 'stock.production.lot' qty_available_not_res = fields.Float( compute='_compute_qty_available_not_res', string='Qty Available Not Reserved', store=True ) @api.multi @api.depends('quant_ids.reservation_id', 'quant_ids.qty') def _compute_qty_available_not_res(self): for rec in self: rec.qty_available_not_res = sum(rec.quant_ids.filtered( lambda x: not x.reservation_id).mapped('qty')) @api.multi def name_get(self): result = [] for rec in self: - name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0) + read = rec.quant_ids.read_group( + [('lot_id', '=', rec.id)], + ['location_id', 'qty'], 'location_id') + locations_qty = ', '.join( + ['%s: %s' % (x['location_id'][1], x['qty']) for x in read]) + name = '%s (%s)' % (rec.name, locations_qty) result.append((rec.id, name)) return result
1c231a8ef54af82d8ec03b828856ddac619fd345
knights/compat/django.py
knights/compat/django.py
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token))
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) @register.helper def safe(value): return str(value)
Add a dummy safe filter for Django compat
Add a dummy safe filter for Django compat
Python
mit
funkybob/knights-templater,funkybob/knights-templater
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) + + @register.helper + def safe(value): + return str(value) +
Add a dummy safe filter for Django compat
## Code Before: import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) ## Instruction: Add a dummy safe filter for Django compat ## Code After: import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) @register.helper def safe(value): return str(value)
import ast from knights.library import Library register = Library() @register.tag def static(parser, token): src = parser.parse_expression(token) return ast.Yield(value=ast.BinOp( left=ast.Str(s='/static/%s'), op=ast.Mod(), right=src, )) @register.tag(name='include') def do_include(parser, token): return ast.Yield(value=ast.Str(s='{include %s}' % token)) + + + @register.helper + def safe(value): + return str(value)
d75a79d10658ad32a9b1d71e472372d8335c7bb6
ml/test_amaranth_lib.py
ml/test_amaranth_lib.py
"""These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): def test_load_calorie_data(self): raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main()
"""These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): def test_combine_dataframes(self): raise NotImplementedError def test_get_calorie_data(self): raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main()
Update testing stubs with helper lib changes
Update testing stubs with helper lib changes
Python
apache-2.0
googleinterns/amaranth,googleinterns/amaranth
"""These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): + def test_combine_dataframes(self): + raise NotImplementedError + - def test_load_calorie_data(self): + def test_get_calorie_data(self): raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main()
Update testing stubs with helper lib changes
## Code Before: """These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): def test_load_calorie_data(self): raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main() ## Instruction: Update testing stubs with helper lib changes ## Code After: """These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): def test_combine_dataframes(self): raise NotImplementedError def test_get_calorie_data(self): raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main()
"""These tests ensure correctness for the helper functions in amaranth_lib.""" import unittest class TestAmaranthHelpers(unittest.TestCase): + def test_combine_dataframes(self): + raise NotImplementedError + - def test_load_calorie_data(self): ? ^^^^ + def test_get_calorie_data(self): ? ^^^ raise NotImplementedError def test_clean_data(self): raise NotImplementedError def test_add_calorie_labels(self): raise NotImplementedError def test_num_unique_words(self): raise NotImplementedError def test_max_sequence_length(self): raise NotImplementedError def test_add_input_labels(self): raise NotImplementedError if __name__ == '__main__': unittest.main()