commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8af349128b725e47b89f28ddc005d142a44c5765
|
openarc/env.py
|
openarc/env.py
|
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
@property
def extcreds(self):
return self.envcfg['extcreds']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
Allow retrieval of external api credentials
|
Allow retrieval of external api credentials
|
Python
|
bsd-3-clause
|
kchoudhu/openarc
|
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
+
+ @property
+ def extcreds(self):
+ return self.envcfg['extcreds']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
Allow retrieval of external api credentials
|
## Code Before:
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
## Instruction:
Allow retrieval of external api credentials
## Code After:
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
@property
def extcreds(self):
return self.envcfg['extcreds']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
+
+ @property
+ def extcreds(self):
+ return self.envcfg['extcreds']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
63f04662f5ca22443ab6080f559ac898302cf103
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
|
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
|
Reorder redeploy tests within a single module
|
Reorder redeploy tests within a single module
The original code for on_redeploy was making the
assumption that there was only one integration test file.
When test_package.py was added, the tests always failed
because the redeploy tests were run *after* the package tests
which messed with the module scope fixtures.
Now we ensure we only reorder tests within test_features.py.
|
Python
|
apache-2.0
|
awslabs/chalice
|
+ DEPLOY_TEST_BASENAME = 'test_features.py'
+
+
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
- final_list = []
- on_redeploy_tests = []
+ start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
+ marked = []
+ unmarked = []
- for item in items:
+ for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
- on_redeploy_tests.append(item)
+ marked.append(item)
else:
- final_list.append(item)
+ unmarked.append(item)
+ items[start:end] = unmarked + marked
- final_list.extend(on_redeploy_tests)
- items[:] = final_list
+
+ def _get_start_end_index(basename, items):
+ # precondition: all the tests for test_features.py are
+ # in a contiguous range. This is the case because pytest
+ # will group all tests in a module together.
+ matched = [item.fspath.basename == basename for item in items]
+ return (
+ matched.index(True),
+ len(matched) - list(reversed(matched)).index(True)
+ )
+
|
Reorder redeploy tests within a single module
|
## Code Before:
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
## Instruction:
Reorder redeploy tests within a single module
## Code After:
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
|
+ DEPLOY_TEST_BASENAME = 'test_features.py'
+
+
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
- final_list = []
- on_redeploy_tests = []
+ start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
+ marked = []
+ unmarked = []
- for item in items:
+ for item in items[start:end]:
? ++++++ +++++
if item.get_marker('on_redeploy') is not None:
- on_redeploy_tests.append(item)
? ^^^ -----------
+ marked.append(item)
? ^^ +
else:
- final_list.append(item)
? ^^ ^^^^^^
+ unmarked.append(item)
? ^ + ^^^^
- final_list.extend(on_redeploy_tests)
- items[:] = final_list
+ items[start:end] = unmarked + marked
+
+
+ def _get_start_end_index(basename, items):
+ # precondition: all the tests for test_features.py are
+ # in a contiguous range. This is the case because pytest
+ # will group all tests in a module together.
+ matched = [item.fspath.basename == basename for item in items]
+ return (
+ matched.index(True),
+ len(matched) - list(reversed(matched)).index(True)
+ )
|
862c2bdeaab094afdd61db862be54a8c4b7c08f3
|
corehq/apps/users/admin.py
|
corehq/apps/users/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
Add ApiKey to Users page in Django Admin
|
Add ApiKey to Users page in Django Admin
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
- from .models import DomainPermissionsMirror
+ from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
+ class ApiKeyInline(admin.TabularInline):
+ model = HQApiKey
+ readonly_fields = ['key', 'created']
+ extra = 1
+
+
class CustomUserAdmin(UserAdmin):
+ inlines = [
+ ApiKeyInline,
+ ]
+
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
Add ApiKey to Users page in Django Admin
|
## Code Before:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
## Instruction:
Add ApiKey to Users page in Django Admin
## Code After:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
- from .models import DomainPermissionsMirror
+ from .models import DomainPermissionsMirror, HQApiKey
? ++++++++++
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
+ class ApiKeyInline(admin.TabularInline):
+ model = HQApiKey
+ readonly_fields = ['key', 'created']
+ extra = 1
+
+
class CustomUserAdmin(UserAdmin):
+ inlines = [
+ ApiKeyInline,
+ ]
+
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
891e8afe5deff5fe7d620abfe8189689d47ec4f8
|
djangocms_inherit/forms.py
|
djangocms_inherit/forms.py
|
from django import forms
from django.forms.models import ModelForm
from django.forms.utils import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
|
from django import forms
from django.forms.models import ModelForm
try:
from django.forms.utils import ErrorList
except ImportError:
# Django<1.7 (deprecated in Django 1.8, removed in 1.9)
from django.forms.util import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
|
Make import backward compatible (Django<1.7)
|
Make import backward compatible (Django<1.7)
|
Python
|
bsd-3-clause
|
bittner/djangocms-inherit,bittner/djangocms-inherit,divio/djangocms-inherit,divio/djangocms-inherit,divio/djangocms-inherit
|
from django import forms
from django.forms.models import ModelForm
+ try:
- from django.forms.utils import ErrorList
+ from django.forms.utils import ErrorList
+ except ImportError:
+ # Django<1.7 (deprecated in Django 1.8, removed in 1.9)
+ from django.forms.util import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
|
Make import backward compatible (Django<1.7)
|
## Code Before:
from django import forms
from django.forms.models import ModelForm
from django.forms.utils import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
## Instruction:
Make import backward compatible (Django<1.7)
## Code After:
from django import forms
from django.forms.models import ModelForm
try:
from django.forms.utils import ErrorList
except ImportError:
# Django<1.7 (deprecated in Django 1.8, removed in 1.9)
from django.forms.util import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
|
from django import forms
from django.forms.models import ModelForm
+ try:
- from django.forms.utils import ErrorList
+ from django.forms.utils import ErrorList
? ++++
+ except ImportError:
+ # Django<1.7 (deprecated in Django 1.8, removed in 1.9)
+ from django.forms.util import ErrorList
from django.utils.translation import ugettext_lazy as _
from cms.models import Page
from .models import InheritPagePlaceholder
class InheritForm(ModelForm):
from_page = forms.ModelChoiceField(
label=_("page"), queryset=Page.objects.drafts(), required=False)
class Meta:
model = InheritPagePlaceholder
exclude = ('page', 'position', 'placeholder', 'language',
'plugin_type')
def for_site(self, site):
# override the page_link fields queryset to containt just pages for
# current site
self.fields['from_page'].queryset = Page.objects.drafts().on_site(site)
def clean(self):
cleaned_data = super(InheritForm, self).clean()
if not cleaned_data['from_page'] and not cleaned_data['from_language']:
self._errors['from_page'] = ErrorList(
[_("Language or Page must be filled out")])
return cleaned_data
|
85c509913cc9a6b22036c33eccb07277b39260e3
|
pygraphc/anomaly/AnomalyScore.py
|
pygraphc/anomaly/AnomalyScore.py
|
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
Add description of Parameters section in docstring
|
Add description of Parameters section in docstring
|
Python
|
mit
|
studiawan/pygraphc
|
import csv
-
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
+ A graph to be analyzed for its anomaly.
clusters : dict[list]
+ Dictionary of list containing node identifier for each clusters.
filename : str
+ Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
Add description of Parameters section in docstring
|
## Code Before:
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
## Instruction:
Add description of Parameters section in docstring
## Code After:
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
import csv
-
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
+ A graph to be analyzed for its anomaly.
clusters : dict[list]
+ Dictionary of list containing node identifier for each clusters.
filename : str
+ Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
8f094e1c3d4a64942cadf5603ce5b23706381fac
|
nubes/cmd/__init__.py
|
nubes/cmd/__init__.py
|
import openstack
def main():
print("Hello Clouds!")
|
import argparse
from nubes import dispatcher
def main():
parser = argparse.ArgumentParser(description='Universal IaaS CLI')
parser.add_argument('connector', help='IaaS Name')
parser.add_argument('resource', help='Resource to perform action')
parser.add_argument('action', help='Action to perform on resource')
parser.add_argument('--auth-url')
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--project-name')
args = parser.parse_args()
dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
args.username, args.password,
args.project_name)
resource = args.resource
if args.action == 'list':
# make plural
resource = args.resource + 's'
method_name = '_'.join([args.action, resource])
return getattr(dispatch, method_name)()
|
Make crude CLI commands work
|
Make crude CLI commands work
This is mainly as an example to show what it can look like.
|
Python
|
apache-2.0
|
omninubes/nubes
|
- import openstack
+ import argparse
+
+ from nubes import dispatcher
def main():
- print("Hello Clouds!")
+ parser = argparse.ArgumentParser(description='Universal IaaS CLI')
+ parser.add_argument('connector', help='IaaS Name')
+ parser.add_argument('resource', help='Resource to perform action')
+ parser.add_argument('action', help='Action to perform on resource')
+ parser.add_argument('--auth-url')
+ parser.add_argument('--username')
+ parser.add_argument('--password')
+ parser.add_argument('--project-name')
+ args = parser.parse_args()
+ dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
+ args.username, args.password,
+ args.project_name)
+ resource = args.resource
+ if args.action == 'list':
+ # make plural
+ resource = args.resource + 's'
+ method_name = '_'.join([args.action, resource])
+ return getattr(dispatch, method_name)()
+
|
Make crude CLI commands work
|
## Code Before:
import openstack
def main():
print("Hello Clouds!")
## Instruction:
Make crude CLI commands work
## Code After:
import argparse
from nubes import dispatcher
def main():
parser = argparse.ArgumentParser(description='Universal IaaS CLI')
parser.add_argument('connector', help='IaaS Name')
parser.add_argument('resource', help='Resource to perform action')
parser.add_argument('action', help='Action to perform on resource')
parser.add_argument('--auth-url')
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--project-name')
args = parser.parse_args()
dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
args.username, args.password,
args.project_name)
resource = args.resource
if args.action == 'list':
# make plural
resource = args.resource + 's'
method_name = '_'.join([args.action, resource])
return getattr(dispatch, method_name)()
|
- import openstack
+ import argparse
+
+ from nubes import dispatcher
def main():
- print("Hello Clouds!")
+ parser = argparse.ArgumentParser(description='Universal IaaS CLI')
+ parser.add_argument('connector', help='IaaS Name')
+ parser.add_argument('resource', help='Resource to perform action')
+ parser.add_argument('action', help='Action to perform on resource')
+ parser.add_argument('--auth-url')
+ parser.add_argument('--username')
+ parser.add_argument('--password')
+ parser.add_argument('--project-name')
+ args = parser.parse_args()
+ dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
+ args.username, args.password,
+ args.project_name)
+ resource = args.resource
+ if args.action == 'list':
+ # make plural
+ resource = args.resource + 's'
+
+ method_name = '_'.join([args.action, resource])
+ return getattr(dispatch, method_name)()
|
155f53100148ffd09e9e0e0f1f9de073974ea97b
|
osgtest/tests/test_89_condor.py
|
osgtest/tests/test_89_condor.py
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
Use skip_ok_unless instead of a comparison against 'False'
|
Use skip_ok_unless instead of a comparison against 'False'
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
- self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
+ self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
Use skip_ok_unless instead of a comparison against 'False'
|
## Code Before:
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
## Instruction:
Use skip_ok_unless instead of a comparison against 'False'
## Code After:
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
- self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
? ^^ ---------
+ self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
? ^^^^^^
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
2b58318ad7134a8c894b70918520a89b51a2d6dd
|
cla_backend/apps/reports/tests/test_utils.py
|
cla_backend/apps/reports/tests/test_utils.py
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
def test_get_s3_connection(self):
envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
Modify s3 connection test for new AWS_S3_HOST setting
|
Modify s3 connection test for new AWS_S3_HOST setting
The value is now calculated from the env var at load time, so mocking
the env var value is not effective
(cherry picked from commit 044219df7123e3a03a38cc06c9e8e8e9e80b0cbe)
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
- @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
+ @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
- envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
+ envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
Modify s3 connection test for new AWS_S3_HOST setting
|
## Code Before:
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
def test_get_s3_connection(self):
envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
## Instruction:
Modify s3 connection test for new AWS_S3_HOST setting
## Code After:
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
@override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
def test_get_s3_connection(self):
envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
import mock
import os
from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings
from reports.utils import get_s3_connection
class UtilsTestCase(TestCase):
- @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
+ @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
? ++++++++++++++++++++++++++++++++++++++++++
def test_get_s3_connection(self):
- envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
+ envs = {"S3_USE_SIGV4": "True"}
with mock.patch.dict(os.environ, envs):
conn = get_s3_connection()
self.assertIsInstance(conn, S3Connection)
|
1a0c9fd8e8d6ce59c2d6ea42c59dfa6497400753
|
buildscripts/condarecipe/run_test.py
|
buildscripts/condarecipe/run_test.py
|
import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
|
import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
Fix buildscript for Python2 on OSX
|
Fix buildscript for Python2 on OSX
|
Python
|
bsd-3-clause
|
llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy
|
import sys
import platform
import llvm
+ from llvm.ee import TargetMachine
+ target = TargetMachine.new()
-
- from llvm.core import Module
- from llvm.ee import EngineBuilder
- from llvm.utils import check_intrinsics
-
- m = Module.new('fjoidajfa')
- eb = EngineBuilder.new(m)
- target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
- #check_intrinsics.main()
+ print('llvm.__version__: %s' % llvm.__version__)
- print('llvm.__version__: %s' % llvm.__version__)
- #assert llvm.__version__ == '0.12.0'
-
|
Fix buildscript for Python2 on OSX
|
## Code Before:
import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
## Instruction:
Fix buildscript for Python2 on OSX
## Code After:
import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
import sys
import platform
import llvm
+ from llvm.ee import TargetMachine
+ target = TargetMachine.new()
-
- from llvm.core import Module
- from llvm.ee import EngineBuilder
- from llvm.utils import check_intrinsics
-
- m = Module.new('fjoidajfa')
- eb = EngineBuilder.new(m)
- target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
- #check_intrinsics.main()
-
print('llvm.__version__: %s' % llvm.__version__)
- #assert llvm.__version__ == '0.12.0'
|
cd359f8487ee5aab3645a0089695967802e485d0
|
samples/python/uppercase/py/func.py
|
samples/python/uppercase/py/func.py
|
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
class StringFunctionServicer(function.StringFunctionServicer):
def Call(self, request, context):
reply = types.Reply()
reply.body = request.body.upper()
return reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
|
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
'''
This method’s semantics are a combination of those of the request-streaming method and the response-streaming method.
It is passed an iterator of request values and is itself an iterator of response values.
'''
class StringFunctionServicer(function.StringFunctionServicer):
def Call(self, request_iterator, context):
for request in request_iterator:
reply = types.Reply()
reply.body = request.body.upper()
yield reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
|
Enable GRPC Streaming in Python uppercase sample
|
Enable GRPC Streaming in Python uppercase sample
|
Python
|
apache-2.0
|
markfisher/sk8s,markfisher/sk8s,markfisher/sk8s,markfisher/sk8s
|
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
+ '''
+ This method’s semantics are a combination of those of the request-streaming method and the response-streaming method.
+ It is passed an iterator of request values and is itself an iterator of response values.
+ '''
class StringFunctionServicer(function.StringFunctionServicer):
- def Call(self, request, context):
+ def Call(self, request_iterator, context):
+ for request in request_iterator:
- reply = types.Reply()
+ reply = types.Reply()
- reply.body = request.body.upper()
+ reply.body = request.body.upper()
- return reply
+ yield reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
|
Enable GRPC Streaming in Python uppercase sample
|
## Code Before:
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
class StringFunctionServicer(function.StringFunctionServicer):
def Call(self, request, context):
reply = types.Reply()
reply.body = request.body.upper()
return reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
## Instruction:
Enable GRPC Streaming in Python uppercase sample
## Code After:
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
'''
This method’s semantics are a combination of those of the request-streaming method and the response-streaming method.
It is passed an iterator of request values and is itself an iterator of response values.
'''
class StringFunctionServicer(function.StringFunctionServicer):
def Call(self, request_iterator, context):
for request in request_iterator:
reply = types.Reply()
reply.body = request.body.upper()
yield reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
|
import os,sys
sys.path.insert(0, os.path.abspath('.'))
import grpc
import time
import function_pb2_grpc as function
import fntypes_pb2 as types
from concurrent import futures
+ '''
+ This method’s semantics are a combination of those of the request-streaming method and the response-streaming method.
+ It is passed an iterator of request values and is itself an iterator of response values.
+ '''
class StringFunctionServicer(function.StringFunctionServicer):
- def Call(self, request, context):
+ def Call(self, request_iterator, context):
? +++++++++
+ for request in request_iterator:
- reply = types.Reply()
+ reply = types.Reply()
? ++++
- reply.body = request.body.upper()
+ reply.body = request.body.upper()
? ++++
- return reply
+ yield reply
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
function.add_StringFunctionServicer_to_server(StringFunctionServicer(), server)
server.add_insecure_port('%s:%s' % ('[::]', os.environ.get("GRPC_PORT","10382")))
server.start()
while True:
time.sleep(10)
|
1a71fba6224a9757f19e702a3b9a1cebf496a754
|
src/loop+blkback/plugin.py
|
src/loop+blkback/plugin.py
|
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
|
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
Use the new xapi.storage package hierarchy
|
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <[email protected]>
|
Python
|
lgpl-2.1
|
jjd27/xapi-storage-datapath-plugins,robertbreker/xapi-storage-datapath-plugins,djs55/xapi-storage-datapath-plugins,xapi-project/xapi-storage-datapath-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins
|
import os
import sys
import xapi
- import xapi.plugin
+ import xapi.storage.api.plugin
- from xapi.storage.datapath import log
+ from xapi.storage import log
- class Implementation(xapi.plugin.Plugin_skeleton):
+ class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
- cmd = xapi.plugin.Plugin_commandline(Implementation())
+ cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
- raise xapi.plugin.Unimplemented(base)
+ raise xapi.storage.api.plugin.Unimplemented(base)
|
Use the new xapi.storage package hierarchy
|
## Code Before:
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
## Instruction:
Use the new xapi.storage package hierarchy
## Code After:
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
import os
import sys
import xapi
- import xapi.plugin
+ import xapi.storage.api.plugin
? ++++++++++++
- from xapi.storage.datapath import log
? ---------
+ from xapi.storage import log
- class Implementation(xapi.plugin.Plugin_skeleton):
+ class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
? ++++++++++++
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
- cmd = xapi.plugin.Plugin_commandline(Implementation())
+ cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
? ++++++++++++
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
- raise xapi.plugin.Unimplemented(base)
+ raise xapi.storage.api.plugin.Unimplemented(base)
? ++++++++++++
|
95542ab1b7c22a6e0160e242349c66f2cef7e390
|
syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py
|
syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py
|
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
try:
parse.get_ladl_list(vn_class.ladl_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
try:
parse.get_lvf_list(vn_class.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
|
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
|
Check correspondances in framesets now
|
Check correspondances in framesets now
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
from django.core.management.base import BaseCommand
- from syntacticframes.models import VerbNetClass
+ from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
- for vn_class in VerbNetClass.objects.all():
+ for frameset in VerbNetFrameSet.objects.all():
+ print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
- try:
- parse.get_ladl_list(vn_class.ladl_string)
- except parse.UnknownClassException as e:
- print('{:<30} {}'.format(vn_class.name, e))
+ if frameset.ladl_string:
- try:
+ try:
- parse.get_lvf_list(vn_class.lvf_string)
+ parse.FrenchMapping('LADL', frameset.ladl_string).result()
- except parse.UnknownClassException as e:
+ except parse.UnknownClassException as e:
- print('{:<30} {}'.format(vn_class.name, e))
+ print('{:<30} {}'.format(frameset.name, e))
+ if frameset.lvf_string:
+ try:
+ parse.FrenchMapping('LVF', frameset.lvf_string)
+ except parse.UnknownClassException as e:
+ print('{:<30} {}'.format(frameset.name, e))
+
|
Check correspondances in framesets now
|
## Code Before:
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
try:
parse.get_ladl_list(vn_class.ladl_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
try:
parse.get_lvf_list(vn_class.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
## Instruction:
Check correspondances in framesets now
## Code After:
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
|
from django.core.management.base import BaseCommand
- from syntacticframes.models import VerbNetClass
? ^^ ^^
+ from syntacticframes.models import VerbNetFrameSet
? ^^ ^^^^^
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
- for vn_class in VerbNetClass.objects.all():
? ^^^^^ ^ ^^ ^^
+ for frameset in VerbNetFrameSet.objects.all():
? ^^ ++ ^^ ^^ ^^^^^
+ print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
- try:
- parse.get_ladl_list(vn_class.ladl_string)
- except parse.UnknownClassException as e:
- print('{:<30} {}'.format(vn_class.name, e))
+ if frameset.ladl_string:
- try:
+ try:
? ++++
- parse.get_lvf_list(vn_class.lvf_string)
+ parse.FrenchMapping('LADL', frameset.ladl_string).result()
- except parse.UnknownClassException as e:
+ except parse.UnknownClassException as e:
? ++++
- print('{:<30} {}'.format(vn_class.name, e))
? ^^^^^ ^
+ print('{:<30} {}'.format(frameset.name, e))
? ++++ ^^ ++ ^^
+
+ if frameset.lvf_string:
+ try:
+ parse.FrenchMapping('LVF', frameset.lvf_string)
+ except parse.UnknownClassException as e:
+ print('{:<30} {}'.format(frameset.name, e))
|
d0ccfd4558b9dcf1610140c9df95cec284f0fbe3
|
correos_project/correos/managers.py
|
correos_project/correos/managers.py
|
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
Use email username if no realname is found in header
|
Use email username if no realname is found in header
|
Python
|
bsd-3-clause
|
transcode-de/correos,transcode-de/correos,transcode-de/correos
|
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
+ if len(realname) == 0:
+ realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
Use email username if no realname is found in header
|
## Code Before:
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
## Instruction:
Use email username if no realname is found in header
## Code After:
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
+ if len(realname) == 0:
+ realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
cc8624cfa3788dc66e7afb144fc24ef5f1a79ff9
|
scripts/json-concat-lists.py
|
scripts/json-concat-lists.py
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
Remove duplicate JSON gaferences when concatenating
|
Remove duplicate JSON gaferences when concatenating
|
Python
|
bsd-3-clause
|
geneontology/go-site,geneontology/go-site,geneontology/go-site,geneontology/go-site,geneontology/go-site
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
- list_all += json.load(in_f)
+ file_jsons = json.load(in_f)
+ for fj in file_jsons:
+ if fj not in list_all:
+ list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
Remove duplicate JSON gaferences when concatenating
|
## Code Before:
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
## Instruction:
Remove duplicate JSON gaferences when concatenating
## Code After:
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
- list_all += json.load(in_f)
? ^ ^^^^^ -
+ file_jsons = json.load(in_f)
? ++ ^^^ ^^^
+ for fj in file_jsons:
+ if fj not in list_all:
+ list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
72064e373e6b13f5847199aeb8116ab1708523b2
|
astroquery/cadc/tests/setup_package.py
|
astroquery/cadc/tests/setup_package.py
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
Add fits file to package build
|
Add fits file to package build
|
Python
|
bsd-3-clause
|
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
+ os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
Add fits file to package build
|
## Code Before:
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
## Instruction:
Add fits file to package build
## Code After:
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
+ os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
a938128b1e6b7654f93047883c90bf7b80ee564e
|
pentai/t_all.py
|
pentai/t_all.py
|
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
|
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
Delete test db after a run
|
Delete test db after a run
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
+ import pentai.db.test_db as tdb_m
-
- import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
- z_m.set_db("test.db")
-
unittest.TextTestRunner().run(suite())
-
- os.unlink("test.db")
- os.unlink("test.db.lock")
- os.unlink("test.db.tmp")
- os.unlink("test.db.index")
if __name__ == "__main__":
main()
+ tdb_m.delete_test_db()
|
Delete test db after a run
|
## Code Before:
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
## Instruction:
Delete test db after a run
## Code After:
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
+ import pentai.db.test_db as tdb_m
-
- import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
- z_m.set_db("test.db")
-
unittest.TextTestRunner().run(suite())
-
- os.unlink("test.db")
- os.unlink("test.db.lock")
- os.unlink("test.db.tmp")
- os.unlink("test.db.index")
if __name__ == "__main__":
main()
+ tdb_m.delete_test_db()
|
7f9a31a03e68e1d9dc6f420c6aa157e657da4157
|
apps/core/templatetags/files.py
|
apps/core/templatetags/files.py
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes traceback lines from a string (if any). It has no effect when
no 'Traceback' pattern has been found.
Returns: raws before the 'Traceback' pattern
"""
return Path(path).name
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
return Path(path).name
|
Fix filename template tag docstring
|
Fix filename template tag docstring
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
+ """Removes parent path from a relative or absolute filename
- """Removes traceback lines from a string (if any). It has no effect when
- no 'Traceback' pattern has been found.
- Returns: raws before the 'Traceback' pattern
+ Returns: the filename
"""
return Path(path).name
|
Fix filename template tag docstring
|
## Code Before:
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes traceback lines from a string (if any). It has no effect when
no 'Traceback' pattern has been found.
Returns: raws before the 'Traceback' pattern
"""
return Path(path).name
## Instruction:
Fix filename template tag docstring
## Code After:
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
return Path(path).name
|
from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
+ """Removes parent path from a relative or absolute filename
- """Removes traceback lines from a string (if any). It has no effect when
- no 'Traceback' pattern has been found.
- Returns: raws before the 'Traceback' pattern
+ Returns: the filename
"""
return Path(path).name
|
da54fa6d681ab7f2e3146b55d562e5a4d68623cc
|
luigi/tasks/export/ftp/__init__.py
|
luigi/tasks/export/ftp/__init__.py
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
Make GO term export part of FTP export
|
Make GO term export part of FTP export
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
+ from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
+ yield GoAnnotationExport
|
Make GO term export part of FTP export
|
## Code Before:
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
## Instruction:
Make GO term export part of FTP export
## Code After:
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
+ from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
+ yield GoAnnotationExport
|
570a4911f0babf884fa57b4509957bd94fc790ed
|
moita/pipelines.py
|
moita/pipelines.py
|
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def open_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
Add date at the end
|
Add date at the end
|
Python
|
mit
|
ranisalt/moita-ufsc-crawler
|
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
- def open_spider(self, spider):
- self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
-
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
+ self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
Add date at the end
|
## Code Before:
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def open_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
## Instruction:
Add date at the end
## Code After:
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
- def open_spider(self, spider):
- self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
-
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
+ self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
f495ecb5f9131c2c13c41e78cc3fc2e182bdc8fc
|
hotline/db/db_redis.py
|
hotline/db/db_redis.py
|
import os
import redis
from urllib.parse import urlparse
redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
redis_url_parse = urlparse(redis_url)
redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
|
from db.db_abstract import AbstractClient
from redis import StrictRedis
from urllib.parse import urlparse
class RedisClient(AbstractClient):
def __init__(self, url):
self.url = url
self.client = None
def connect(self):
redis_url = urlparse(self.url)
self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
def get(self, **kwargs):
pass
def set(self, **kwargs):
pass
def update(self, **kwargs):
pass
def delete(self, **kwargs):
pass
|
Update to inherit from abstract class
|
Update to inherit from abstract class
|
Python
|
mit
|
wearhacks/hackathon_hotline
|
+ from db.db_abstract import AbstractClient
+ from redis import StrictRedis
- import os
- import redis
-
from urllib.parse import urlparse
+ class RedisClient(AbstractClient):
- redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
- redis_url_parse = urlparse(redis_url)
- redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
+ def __init__(self, url):
+ self.url = url
+ self.client = None
+ def connect(self):
+ redis_url = urlparse(self.url)
+ self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
+ def get(self, **kwargs):
+ pass
+
+ def set(self, **kwargs):
+ pass
+
+ def update(self, **kwargs):
+ pass
+
+ def delete(self, **kwargs):
+ pass
+
|
Update to inherit from abstract class
|
## Code Before:
import os
import redis
from urllib.parse import urlparse
redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
redis_url_parse = urlparse(redis_url)
redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
## Instruction:
Update to inherit from abstract class
## Code After:
from db.db_abstract import AbstractClient
from redis import StrictRedis
from urllib.parse import urlparse
class RedisClient(AbstractClient):
def __init__(self, url):
self.url = url
self.client = None
def connect(self):
redis_url = urlparse(self.url)
self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
def get(self, **kwargs):
pass
def set(self, **kwargs):
pass
def update(self, **kwargs):
pass
def delete(self, **kwargs):
pass
|
+ from db.db_abstract import AbstractClient
+ from redis import StrictRedis
- import os
- import redis
-
from urllib.parse import urlparse
+ class RedisClient(AbstractClient):
- redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
- redis_url_parse = urlparse(redis_url)
- redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
+ def __init__(self, url):
+ self.url = url
+ self.client = None
+ def connect(self):
+ redis_url = urlparse(self.url)
+ self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
+
+ def get(self, **kwargs):
+ pass
+
+ def set(self, **kwargs):
+ pass
+
+ def update(self, **kwargs):
+ pass
+
+ def delete(self, **kwargs):
+ pass
|
eaff795bddb0e07f4ad4e4c9277c5c0f6f199380
|
salt/beacons/__init__.py
|
salt/beacons/__init__.py
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
Add id tot he beacon event dataset
|
Add id tot he beacon event dataset
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
+ if not 'id' in data:
+ data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
Add id tot he beacon event dataset
|
## Code Before:
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
ret.append({'tag': tag, 'data': data})
return ret
## Instruction:
Add id tot he beacon event dataset
## Code After:
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
if not 'id' in data:
data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
'''
This class is used to eveluate and execute on the beacon system
'''
def __init__(self, opts):
self.opts = opts
self.beacons = salt.loader.beacons(opts)
def process(self, config):
'''
Process the configured beacons
The config must be a dict and looks like this in yaml
code_block:: yaml
beacons:
inotify:
- /etc/fstab
- /var/cache/foo/*
'''
ret = []
for mod in config:
fun_str = '{0}.beacon'.format(mod)
if fun_str in self.beacons:
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
raw = self.beacons[fun_str](config[mod])
for data in raw:
if 'tag' in data:
tag += data.pop('tag')
+ if not 'id' in data:
+ data['id'] = self.opts['id']
ret.append({'tag': tag, 'data': data})
return ret
|
8c228a79450c49ee1d494ca1e3cf61ea7bcc8177
|
setup.py
|
setup.py
|
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
os.environ["STEAM_API_KEY"] = self.key
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
Set API key directly in test runner
|
Set API key directly in test runner
|
Python
|
isc
|
miedzinski/steamodd,Lagg/steamodd
|
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
- import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
- os.environ["STEAM_API_KEY"] = self.key
+ steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
Set API key directly in test runner
|
## Code Before:
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
os.environ["STEAM_API_KEY"] = self.key
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
## Instruction:
Set API key directly in test runner
## Code After:
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
- import os
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
- os.environ["STEAM_API_KEY"] = self.key
+ steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
0ebf51994a73fdc7c4f13b274fc41bef541eea52
|
deflect/widgets.py
|
deflect/widgets.py
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
Hide the option set from incompatible browsers
|
Hide the option set from incompatible browsers
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
- output.append('<select>')
+ output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
Hide the option set from incompatible browsers
|
## Code Before:
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select>')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
## Instruction:
Hide the option set from incompatible browsers
## Code After:
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
output.append('<select style="display:none">')
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
from __future__ import unicode_literals
from itertools import chain
from django import forms
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class DataListInput(forms.TextInput):
"""
A form widget that displays a standard ``TextInput`` field, as well
as an HTML5 datalist element. This provides a set of options that
the user can select from, along with the ability to enter a custom
value. Suggested options are matched as the user begins typing.
"""
def __init__(self, attrs=None, choices=()):
super(DataListInput, self).__init__(attrs)
self.choices = list(chain.from_iterable(choices))
def render(self, name, value, attrs={}, choices=()):
attrs['list'] = 'id_%s_list' % name
output = [super(DataListInput, self).render(name, value, attrs)]
options = self.render_options(name, choices)
if options:
output.append(options)
return mark_safe('\n'.join(output))
def render_options(self, name, choices):
output = []
output.append('<datalist id="id_%s_list">' % name)
- output.append('<select>')
+ output.append('<select style="display:none">')
? +++++++++++++++++++++
for option in chain(self.choices, choices):
output.append(format_html('<option value="{0}" />', force_text(option)))
output.append('</select>')
output.append('</datalist>')
return '\n'.join(output)
|
e2574515d9879a5aa023df949031370969dc896c
|
runtests.py
|
runtests.py
|
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
import django
if hasattr(django, "setup"):
django.setup()
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
Remove extra setup() from rebase.
|
Remove extra setup() from rebase.
|
Python
|
bsd-3-clause
|
andreif/djedi-cms,andreif/djedi-cms,5monkeys/djedi-cms,5monkeys/djedi-cms,andreif/djedi-cms,5monkeys/djedi-cms
|
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
-
- import django
-
- if hasattr(django, "setup"):
- django.setup()
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
Remove extra setup() from rebase.
|
## Code Before:
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
import django
if hasattr(django, "setup"):
django.setup()
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
## Instruction:
Remove extra setup() from rebase.
## Code After:
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
import os
import sys
import six
if six.PY2:
import unittest2 as unittest
else:
import unittest
def main():
# Configure python path
parent = os.path.dirname(os.path.abspath(__file__))
if not parent in sys.path:
sys.path.insert(0, parent)
# Discover tests
os.environ['DJANGO_SETTINGS_MODULE'] = 'djedi.tests.settings'
unittest.defaultTestLoader.discover('djedi')
- import django
-
- if hasattr(django, "setup"):
- django.setup()
-
# Run tests
import django
if hasattr(django, 'setup'):
django.setup()
if django.VERSION < (1,7):
from django.test.simple import DjangoTestSuiteRunner as TestRunner
else:
from django.test.runner import DiscoverRunner as TestRunner
runner = TestRunner(verbosity=1, interactive=True, failfast=False)
exit_code = runner.run_tests(['djedi'])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
9be04ea1030b423b7414dbd386ae2db2f4761f07
|
third_party/bunch/bunch/python3_compat.py
|
third_party/bunch/bunch/python3_compat.py
|
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
Fix Python 3 version detection in bunch
|
Fix Python 3 version detection in bunch
|
Python
|
apache-2.0
|
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
|
- import platform
+ import sys
- _IS_PYTHON_3 = (platform.version() >= '3')
+ _IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
Fix Python 3 version detection in bunch
|
## Code Before:
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
## Instruction:
Fix Python 3 version detection in bunch
## Code After:
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
- import platform
+ import sys
- _IS_PYTHON_3 = (platform.version() >= '3')
? ^^^^^^^^ ^^
+ _IS_PYTHON_3 = (sys.version[0] >= '3')
? ^^^ ^^^
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
a15813399992fb8bbf951854a218e30e4cddd717
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
def prime_factors(number):
factors = []
if number > 1:
for num in range(2, number):
if (number % num) == 0:
factors.append(num)
return factors
break
else:
return True
else:
return factors
|
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
|
Add two more arguments with function
|
Add two more arguments with function
|
Python
|
mit
|
amalshehu/exercism-python
|
- def prime_factors(number):
+ def prime_factors(number, n=2, factors=None):
+ if factors is None:
- factors = []
+ factors = []
- if number > 1:
- for num in range(2, number):
+ for num in range(n, number):
- if (number % num) == 0:
+ if (number % num) == 0:
- factors.append(num)
+ factors.append(num)
+ return prime_factors(number // num, num, factors)
- return factors
- break
- else:
- return True
else:
return factors
|
Add two more arguments with function
|
## Code Before:
def prime_factors(number):
factors = []
if number > 1:
for num in range(2, number):
if (number % num) == 0:
factors.append(num)
return factors
break
else:
return True
else:
return factors
## Instruction:
Add two more arguments with function
## Code After:
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
|
- def prime_factors(number):
+ def prime_factors(number, n=2, factors=None):
+ if factors is None:
- factors = []
+ factors = []
? ++++
- if number > 1:
- for num in range(2, number):
? ---- ^
+ for num in range(n, number):
? ^
- if (number % num) == 0:
? ----
+ if (number % num) == 0:
- factors.append(num)
? ----
+ factors.append(num)
+ return prime_factors(number // num, num, factors)
- return factors
- break
- else:
- return True
else:
return factors
|
b540d5c3943f6a21232428ecf717667c6beb48d5
|
dmp/__init__.py
|
dmp/__init__.py
|
import dmp
import rest
__author__ = 'Mark McDowall'
__version__ = 'v0.0'
__license__ = 'Apache 2.0'
|
import dmp.dmp
import rest.rest
__author__ = 'Mark McDowall'
__version__ = '0.0'
__license__ = 'Apache 2.0'
|
Change to try and improve importability
|
Change to try and improve importability
|
Python
|
apache-2.0
|
Multiscale-Genomics/mg-dm-api,Multiscale-Genomics/mg-dm-api
|
- import dmp
+ import dmp.dmp
- import rest
+ import rest.rest
__author__ = 'Mark McDowall'
- __version__ = 'v0.0'
+ __version__ = '0.0'
__license__ = 'Apache 2.0'
|
Change to try and improve importability
|
## Code Before:
import dmp
import rest
__author__ = 'Mark McDowall'
__version__ = 'v0.0'
__license__ = 'Apache 2.0'
## Instruction:
Change to try and improve importability
## Code After:
import dmp.dmp
import rest.rest
__author__ = 'Mark McDowall'
__version__ = '0.0'
__license__ = 'Apache 2.0'
|
- import dmp
+ import dmp.dmp
? ++++
- import rest
+ import rest.rest
? +++++
__author__ = 'Mark McDowall'
- __version__ = 'v0.0'
? -
+ __version__ = '0.0'
__license__ = 'Apache 2.0'
|
bd3d97cefe61886ab8c2fa24eecd624ca1c6f751
|
profile_collection/startup/90-settings.py
|
profile_collection/startup/90-settings.py
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_scanid(name, doc):
if name == 'start':
print('Scan ID:', doc['scan_id'])
print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
Remove redundant Scan ID printing (there is another one elsewhere)
|
Remove redundant Scan ID printing (there is another one elsewhere)
|
Python
|
bsd-2-clause
|
NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
-
-
-
- def print_scanid(name, doc):
- if name == 'start':
- print('Scan ID:', doc['scan_id'])
- print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
Remove redundant Scan ID printing (there is another one elsewhere)
|
## Code Before:
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_scanid(name, doc):
if name == 'start':
print('Scan ID:', doc['scan_id'])
print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
## Instruction:
Remove redundant Scan ID printing (there is another one elsewhere)
## Code After:
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
-
-
-
- def print_scanid(name, doc):
- if name == 'start':
- print('Scan ID:', doc['scan_id'])
- print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
7d7043560f26c31346472b6452e8b191729c54a3
|
offsite_storage/settings.py
|
offsite_storage/settings.py
|
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
|
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
|
Use custom endpoint url in AWS_HOST_URL variable
|
Use custom endpoint url in AWS_HOST_URL variable
|
Python
|
bsd-3-clause
|
mirumee/django-offsite-storage
|
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
- AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
+ AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
|
Use custom endpoint url in AWS_HOST_URL variable
|
## Code Before:
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
## Instruction:
Use custom endpoint url in AWS_HOST_URL variable
## Code After:
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
|
from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
- AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
+ AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
|
5a0659ed9e4f8085009c04ade4f66cbd5d3c94bd
|
openedx/core/djangoapps/user_api/accounts/permissions.py
|
openedx/core/djangoapps/user_api/accounts/permissions.py
|
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
|
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
Replace group with static username
|
Replace group with static username
|
Python
|
agpl-3.0
|
appsembler/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,mitocw/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,cpennington/edx-platform,appsembler/edx-platform,ESOedX/edx-platform,msegado/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx/edx-platform,edx/edx-platform,jolyonb/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,arbrandes/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,ESOedX/edx-platform,msegado/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,cpennington/edx-platform,edx/edx-platform,cpennington/edx-platform,appsembler/edx-platform
|
from __future__ import unicode_literals
+ from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
- return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
+ return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
Replace group with static username
|
## Code Before:
from __future__ import unicode_literals
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
## Instruction:
Replace group with static username
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
from __future__ import unicode_literals
+ from django.conf import settings
from rest_framework import permissions
USERNAME_REPLACEMENT_GROUP = "username_replacement_admin"
class CanDeactivateUser(permissions.BasePermission):
"""
Grants access to AccountDeactivationView if the requesting user is a superuser
or has the explicit permission to deactivate a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('student.can_deactivate_users')
class CanRetireUser(permissions.BasePermission):
"""
Grants access to the various retirement API endpoints if the requesting user is
a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to
retire a User account.
"""
def has_permission(self, request, view):
return request.user.has_perm('accounts.can_retire_user')
class CanReplaceUsername(permissions.BasePermission):
"""
Grants access to the Username Replacement API for anyone in the group,
including the service user.
"""
def has_permission(self, request, view):
- return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
+ return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
|
eac2211956d49d9da957492bbac1bcdc85b1e40d
|
openprescribing/frontend/management/commands/load_development_data.py
|
openprescribing/frontend/management/commands/load_development_data.py
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.models import ImportLog, PPUSaving
from frontend.tests.test_api_spending import ApiTestBase, TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
ApiTestBase.setUpTestData()
max_ppu_date = PPUSaving.objects.order_by('-date')[0].date
ImportLog.objects.create(current_at=max_ppu_date, category='ppu')
|
Add extra development data so the All England page loads
|
Add extra development data so the All England page loads
Previously the absence of the PPU ImportLog entry caused the page to
throw an error.
|
Python
|
mit
|
ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
+ from frontend.models import ImportLog, PPUSaving
- from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
+ from frontend.tests.test_api_spending import ApiTestBase, TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
+ ApiTestBase.setUpTestData()
+ max_ppu_date = PPUSaving.objects.order_by('-date')[0].date
+ ImportLog.objects.create(current_at=max_ppu_date, category='ppu')
|
Add extra development data so the All England page loads
|
## Code Before:
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
## Instruction:
Add extra development data so the All England page loads
## Code After:
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.models import ImportLog, PPUSaving
from frontend.tests.test_api_spending import ApiTestBase, TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
ApiTestBase.setUpTestData()
max_ppu_date = PPUSaving.objects.order_by('-date')[0].date
ImportLog.objects.create(current_at=max_ppu_date, category='ppu')
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
+ from frontend.models import ImportLog, PPUSaving
- from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
+ from frontend.tests.test_api_spending import ApiTestBase, TestAPISpendingViewsPPUTable
? +++++++++++++
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
+ ApiTestBase.setUpTestData()
+ max_ppu_date = PPUSaving.objects.order_by('-date')[0].date
+ ImportLog.objects.create(current_at=max_ppu_date, category='ppu')
|
373b0210483839b7ac5b4fd8eb0bcfdfe8d63d83
|
begood_sites/fields.py
|
begood_sites/fields.py
|
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
|
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
if 'to' in defaults:
del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
if 'to' in kwargs:
del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
|
Fix problem with South migrations.
|
Fix problem with South migrations.
|
Python
|
mit
|
AGoodId/begood-sites
|
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
+ if 'to' in defaults:
+ del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
+ if 'to' in kwargs:
+ del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
|
Fix problem with South migrations.
|
## Code Before:
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
## Instruction:
Fix problem with South migrations.
## Code After:
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
if 'to' in defaults:
del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
if 'to' in kwargs:
del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
|
from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
+ if 'to' in defaults:
+ del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
+ if 'to' in kwargs:
+ del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
|
e4401ba44a5faea7efcd262fde1b5bf1085fbe30
|
wagtail/wagtailimages/utils.py
|
wagtail/wagtailimages/utils.py
|
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
if f.closed:
# Reopen the file
file = open(os.path.join(settings.MEDIA_ROOT, f.name), 'rb')
close = True
else:
# Seek to first byte but save position to be restored later
file_position = f.tell()
f.seek(0)
file = f
close = False
# Open image file
image = Image.open(file)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
# Close/restore file
if close:
file.close()
else:
f.seek(file_position)
|
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
# Open image file
file_position = f.tell()
f.seek(0)
image = Image.open(f)
f.seek(file_position)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
|
Revert "Reopen images for validation if they are closed"
|
Revert "Reopen images for validation if they are closed"
This reverts commit 7d43b1cf6eda74c86209a4cae0d71557ce9bdbc0.
|
Python
|
bsd-3-clause
|
benemery/wagtail,serzans/wagtail,davecranwell/wagtail,hamsterbacke23/wagtail,takeshineshiro/wagtail,mjec/wagtail,nealtodd/wagtail,kurtrwall/wagtail,gasman/wagtail,stevenewey/wagtail,WQuanfeng/wagtail,100Shapes/wagtail,chimeno/wagtail,nutztherookie/wagtail,wagtail/wagtail,chimeno/wagtail,iansprice/wagtail,rv816/wagtail,jorge-marques/wagtail,janusnic/wagtail,timorieber/wagtail,chimeno/wagtail,Klaudit/wagtail,mephizzle/wagtail,jnns/wagtail,darith27/wagtail,100Shapes/wagtail,benjaoming/wagtail,Klaudit/wagtail,gasman/wagtail,iho/wagtail,KimGlazebrook/wagtail-experiment,rv816/wagtail,mikedingjan/wagtail,jnns/wagtail,timorieber/wagtail,hamsterbacke23/wagtail,tangentlabs/wagtail,quru/wagtail,FlipperPA/wagtail,WQuanfeng/wagtail,Pennebaker/wagtail,takeflight/wagtail,inonit/wagtail,jnns/wagtail,zerolab/wagtail,benjaoming/wagtail,chrxr/wagtail,darith27/wagtail,jorge-marques/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,quru/wagtail,janusnic/wagtail,rsalmaso/wagtail,nrsimha/wagtail,takeflight/wagtail,tangentlabs/wagtail,kaedroho/wagtail,benjaoming/wagtail,gasman/wagtail,nutztherookie/wagtail,JoshBarr/wagtail,mephizzle/wagtail,rv816/wagtail,chimeno/wagtail,darith27/wagtail,serzans/wagtail,nutztherookie/wagtail,jorge-marques/wagtail,bjesus/wagtail,WQuanfeng/wagtail,inonit/wagtail,nutztherookie/wagtail,nimasmi/wagtail,mayapurmedia/wagtail,janusnic/wagtail,mixxorz/wagtail,KimGlazebrook/wagtail-experiment,Tivix/wagtail,willcodefortea/wagtail,zerolab/wagtail,iho/wagtail,taedori81/wagtail,gasman/wagtail,chrxr/wagtail,torchbox/wagtail,dresiu/wagtail,nimasmi/wagtail,nrsimha/wagtail,inonit/wagtail,m-sanders/wagtail,JoshBarr/wagtail,takeshineshiro/wagtail,gogobook/wagtail,benjaoming/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,chrxr/wagtail,lojack/wagtail,kurtrwall/wagtail,100Shapes/wagtail,jorge-marques/wagtail,kurtrwall/wagtail,nrsimha/wagtail,takeshineshiro/wagtail,inonit/wagtail,willcodefortea/wagtail,mjec/wagtail,zerolab/wagtail,bjesus/wagtail,Toshakins/wagtail,takeflight/wagtail,hamst
erbacke23/wagtail,rjsproxy/wagtail,davecranwell/wagtail,marctc/wagtail,benemery/wagtail,kurtw/wagtail,jordij/wagtail,nealtodd/wagtail,wagtail/wagtail,stevenewey/wagtail,KimGlazebrook/wagtail-experiment,rsalmaso/wagtail,Pennebaker/wagtail,Toshakins/wagtail,iansprice/wagtail,taedori81/wagtail,JoshBarr/wagtail,torchbox/wagtail,mixxorz/wagtail,JoshBarr/wagtail,gasman/wagtail,hanpama/wagtail,WQuanfeng/wagtail,nimasmi/wagtail,gogobook/wagtail,KimGlazebrook/wagtail-experiment,Tivix/wagtail,zerolab/wagtail,mephizzle/wagtail,helenwarren/pied-wagtail,marctc/wagtail,jnns/wagtail,takeflight/wagtail,wagtail/wagtail,torchbox/wagtail,benemery/wagtail,jordij/wagtail,kurtw/wagtail,davecranwell/wagtail,taedori81/wagtail,timorieber/wagtail,nilnvoid/wagtail,jordij/wagtail,kaedroho/wagtail,wagtail/wagtail,helenwarren/pied-wagtail,takeshineshiro/wagtail,nrsimha/wagtail,dresiu/wagtail,marctc/wagtail,lojack/wagtail,rsalmaso/wagtail,m-sanders/wagtail,zerolab/wagtail,janusnic/wagtail,chimeno/wagtail,taedori81/wagtail,FlipperPA/wagtail,marctc/wagtail,dresiu/wagtail,kurtw/wagtail,mixxorz/wagtail,m-sanders/wagtail,nilnvoid/wagtail,jordij/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,kurtrwall/wagtail,wagtail/wagtail,torchbox/wagtail,dresiu/wagtail,stevenewey/wagtail,mayapurmedia/wagtail,mikedingjan/wagtail,nilnvoid/wagtail,kaedroho/wagtail,rjsproxy/wagtail,darith27/wagtail,mjec/wagtail,thenewguy/wagtail,lojack/wagtail,jorge-marques/wagtail,Klaudit/wagtail,rjsproxy/wagtail,willcodefortea/wagtail,stevenewey/wagtail,hanpama/wagtail,mikedingjan/wagtail,hanpama/wagtail,nealtodd/wagtail,serzans/wagtail,mjec/wagtail,Toshakins/wagtail,iansprice/wagtail,rjsproxy/wagtail,serzans/wagtail,mayapurmedia/wagtail,willcodefortea/wagtail,mixxorz/wagtail,hamsterbacke23/wagtail,Klaudit/wagtail,m-sanders/wagtail,Toshakins/wagtail,hanpama/wagtail,quru/wagtail,Tivix/wagtail,bjesus/wagtail,timorieber/wagtail,dresiu/wagtail,quru/wagtail,mephizzle/wagtail,kaedroho/wagtail,thenewguy/wagtail,davecranwell/wagtail,rsalmaso/
wagtail,gogobook/wagtail,helenwarren/pied-wagtail,kurtw/wagtail,thenewguy/wagtail,gogobook/wagtail,mikedingjan/wagtail,rsalmaso/wagtail,iansprice/wagtail,thenewguy/wagtail,bjesus/wagtail,thenewguy/wagtail,mixxorz/wagtail,nealtodd/wagtail,iho/wagtail,tangentlabs/wagtail,nimasmi/wagtail,FlipperPA/wagtail,rv816/wagtail,Pennebaker/wagtail,iho/wagtail,chrxr/wagtail,Tivix/wagtail,taedori81/wagtail,benemery/wagtail
|
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
- from django.conf import settings
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
- if f.closed:
- # Reopen the file
- file = open(os.path.join(settings.MEDIA_ROOT, f.name), 'rb')
- close = True
- else:
- # Seek to first byte but save position to be restored later
- file_position = f.tell()
- f.seek(0)
- file = f
- close = False
-
# Open image file
+ file_position = f.tell()
+ f.seek(0)
- image = Image.open(file)
+ image = Image.open(f)
+ f.seek(file_position)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
- # Close/restore file
- if close:
- file.close()
- else:
- f.seek(file_position)
-
|
Revert "Reopen images for validation if they are closed"
|
## Code Before:
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
if f.closed:
# Reopen the file
file = open(os.path.join(settings.MEDIA_ROOT, f.name), 'rb')
close = True
else:
# Seek to first byte but save position to be restored later
file_position = f.tell()
f.seek(0)
file = f
close = False
# Open image file
image = Image.open(file)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
# Close/restore file
if close:
file.close()
else:
f.seek(file_position)
## Instruction:
Revert "Reopen images for validation if they are closed"
## Code After:
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
# Open image file
file_position = f.tell()
f.seek(0)
image = Image.open(f)
f.seek(file_position)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
|
import os
from PIL import Image
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
- from django.conf import settings
def validate_image_format(f):
# Check file extension
extension = os.path.splitext(f.name)[1].lower()[1:]
if extension == 'jpg':
extension = 'jpeg'
if extension not in ['gif', 'jpeg', 'png']:
raise ValidationError(_("Not a valid image. Please use a gif, jpeg or png file with the correct file extension."))
- if f.closed:
- # Reopen the file
- file = open(os.path.join(settings.MEDIA_ROOT, f.name), 'rb')
- close = True
- else:
- # Seek to first byte but save position to be restored later
- file_position = f.tell()
- f.seek(0)
- file = f
- close = False
-
# Open image file
+ file_position = f.tell()
+ f.seek(0)
- image = Image.open(file)
? ---
+ image = Image.open(f)
+ f.seek(file_position)
# Check that the internal format matches the extension
if image.format.upper() != extension.upper():
raise ValidationError(_("Not a valid %s image. Please use a gif, jpeg or png file with the correct file extension.") % (extension.upper()))
-
- # Close/restore file
- if close:
- file.close()
- else:
- f.seek(file_position)
|
6dfc6cffb2594b420843ce7021988f78de2b4faf
|
estmator_project/estmator_project/test.py
|
estmator_project/estmator_project/test.py
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
Test commit for travis setup
|
Test commit for travis setup
|
Python
|
mit
|
Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
+ """Sublcassed TestCase for project."""
pass
|
Test commit for travis setup
|
## Code Before:
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
## Instruction:
Test commit for travis setup
## Code After:
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
+ """Sublcassed TestCase for project."""
pass
|
e43ea9602c272119f18e270a0ee138401ee0b02a
|
digit_guesser.py
|
digit_guesser.py
|
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
labels = digits.target[:-10]
x, y = training_set, labels
clf.fit(x, y)
for i in range(10):
print("Prediction: {}".format(clf.predict([digits.data[-i]])))
print("Digit: [{}]".format(digits.target[-i]))
# print('Prediction: ', clf.predict([digits.data[-1]]))
# plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest')
# plt.show()
|
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
training_labels = digits.target[:-10]
testing_set = digits.data[-10:]
testing_labels = digits.target[-10:]
x, y = training_set, training_labels
clf.fit(x, y)
for i in range(10):
print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
|
Make variables self descriptive and create a testing set.
|
Make variables self descriptive and create a testing set.
|
Python
|
mit
|
jeancsil/machine-learning
|
- import matplotlib.pyplot as plt
-
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
- labels = digits.target[:-10]
+ training_labels = digits.target[:-10]
+ testing_set = digits.data[-10:]
+ testing_labels = digits.target[-10:]
+
- x, y = training_set, labels
+ x, y = training_set, training_labels
clf.fit(x, y)
for i in range(10):
+ print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
- print("Prediction: {}".format(clf.predict([digits.data[-i]])))
- print("Digit: [{}]".format(digits.target[-i]))
- # print('Prediction: ', clf.predict([digits.data[-1]]))
- # plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest')
- # plt.show()
-
|
Make variables self descriptive and create a testing set.
|
## Code Before:
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
labels = digits.target[:-10]
x, y = training_set, labels
clf.fit(x, y)
for i in range(10):
print("Prediction: {}".format(clf.predict([digits.data[-i]])))
print("Digit: [{}]".format(digits.target[-i]))
# print('Prediction: ', clf.predict([digits.data[-1]]))
# plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest')
# plt.show()
## Instruction:
Make variables self descriptive and create a testing set.
## Code After:
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
training_labels = digits.target[:-10]
testing_set = digits.data[-10:]
testing_labels = digits.target[-10:]
x, y = training_set, training_labels
clf.fit(x, y)
for i in range(10):
print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
|
- import matplotlib.pyplot as plt
-
from sklearn import datasets
from sklearn import svm
digits = datasets.load_digits()
clf = svm.SVC(gamma=0.0001, C=100)
training_set = digits.data[:-10]
- labels = digits.target[:-10]
+ training_labels = digits.target[:-10]
? +++++++++
+ testing_set = digits.data[-10:]
+ testing_labels = digits.target[-10:]
+
- x, y = training_set, labels
+ x, y = training_set, training_labels
? +++++++++
clf.fit(x, y)
for i in range(10):
+ print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
- print("Prediction: {}".format(clf.predict([digits.data[-i]])))
- print("Digit: [{}]".format(digits.target[-i]))
-
- # print('Prediction: ', clf.predict([digits.data[-1]]))
- # plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest')
- # plt.show()
|
e5351bba6cdf7b76da895afda80c18309c7f90eb
|
tests/test_config.py
|
tests/test_config.py
|
from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2})) == \
{"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
def test_mergedicts_right_none():
assert dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2})) == \
{"a": {'x': 1}, "b": 2}
def test_mergedicts_lists():
assert dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]})) \
== \
{"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
|
from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert (dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2}
)) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
def test_mergedicts_right_none():
assert (dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2}
)) == {"a": {'x': 1}, "b": 2})
def test_mergedicts_lists():
assert (dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]}
)) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]})
|
Replace tabs with spaces, use parens to get rid of backslashes
|
Replace tabs with spaces, use parens to get rid of backslashes
|
Python
|
mit
|
gjcarneiro/yacron
|
from yacron import config
def test_mergedicts():
- assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
+ assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
- assert dict(config.mergedicts(
+ assert (dict(config.mergedicts(
- {"a": {'x': 1, 'y': 2, 'z': 3}},
+ {"a": {'x': 1, 'y': 2, 'z': 3}},
- {'a': {'y': 10}, "b": 2})) == \
+ {'a': {'y': 10}, "b": 2}
- {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
+ )) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
def test_mergedicts_right_none():
- assert dict(config.mergedicts(
+ assert (dict(config.mergedicts(
- {"a": {'x': 1}},
+ {"a": {'x': 1}},
- {"a": None, "b": 2})) == \
+ {"a": None, "b": 2}
- {"a": {'x': 1}, "b": 2}
+ )) == {"a": {'x': 1}, "b": 2})
def test_mergedicts_lists():
- assert dict(config.mergedicts(
+ assert (dict(config.mergedicts(
- {"env": [{'key': 'FOO'}]},
+ {"env": [{'key': 'FOO'}]},
- {"env": [{'key': 'BAR'}]})) \
+ {"env": [{'key': 'BAR'}]}
- == \
- {"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
+ )) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]})
|
Replace tabs with spaces, use parens to get rid of backslashes
|
## Code Before:
from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2})) == \
{"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
def test_mergedicts_right_none():
assert dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2})) == \
{"a": {'x': 1}, "b": 2}
def test_mergedicts_lists():
assert dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]})) \
== \
{"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
## Instruction:
Replace tabs with spaces, use parens to get rid of backslashes
## Code After:
from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert (dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2}
)) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
def test_mergedicts_right_none():
assert (dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2}
)) == {"a": {'x': 1}, "b": 2})
def test_mergedicts_lists():
assert (dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]}
)) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]})
|
from yacron import config
def test_mergedicts():
- assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
? ^
+ assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
? ^^^^
def test_mergedicts_nested():
- assert dict(config.mergedicts(
? ^
+ assert (dict(config.mergedicts(
? ^^^^ +
- {"a": {'x': 1, 'y': 2, 'z': 3}},
? ^
+ {"a": {'x': 1, 'y': 2, 'z': 3}},
? ^^^^
- {'a': {'y': 10}, "b": 2})) == \
? ^ -------
+ {'a': {'y': 10}, "b": 2}
? ^^^^
- {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
? ^^
+ )) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
? ^^^^^^^^^^ +
def test_mergedicts_right_none():
- assert dict(config.mergedicts(
? ^
+ assert (dict(config.mergedicts(
? ^^^^ +
- {"a": {'x': 1}},
? ^
+ {"a": {'x': 1}},
? ^^^^
- {"a": None, "b": 2})) == \
? ^ -------
+ {"a": None, "b": 2}
? ^^^^
- {"a": {'x': 1}, "b": 2}
? ^^
+ )) == {"a": {'x': 1}, "b": 2})
? ^^^^^^^^^^ +
def test_mergedicts_lists():
- assert dict(config.mergedicts(
? ^
+ assert (dict(config.mergedicts(
? ^^^^ +
- {"env": [{'key': 'FOO'}]},
? ^
+ {"env": [{'key': 'FOO'}]},
? ^^^^
- {"env": [{'key': 'BAR'}]})) \
? ^ ----
+ {"env": [{'key': 'BAR'}]}
? ^^^^
- == \
- {"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
? ^^
+ )) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]})
? ^^^^^^^^^^ +
|
83ed5ca9bc388dbe9b2d82510842a99b3a2e5ce7
|
src/personalisation/middleware.py
|
src/personalisation/middleware.py
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
Create empty 'segments' object in session if none exists
|
Create empty 'segments' object in session if none exists
|
Python
|
mit
|
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
+ if not request.session.get('segments'):
+ request.session['segments'] = []
+
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
Create empty 'segments' object in session if none exists
|
## Code Before:
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
## Instruction:
Create empty 'segments' object in session if none exists
## Code After:
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
+ if not request.session.get('segments'):
+ request.session['segments'] = []
+
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
25cebf23c84d8e1136a3e2b503e574aa1c7263e6
|
dbaas_zabbix/dbaas_api.py
|
dbaas_zabbix/dbaas_api.py
|
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
|
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
|
Add databaseinfra get engine name
|
Add databaseinfra get engine name
|
Python
|
bsd-3-clause
|
globocom/dbaas-zabbix,globocom/dbaas-zabbix
|
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
+ def get_databaseinfra_engine_name(self):
+ return self.databaseinfra.engine.engine_type.name
+
|
Add databaseinfra get engine name
|
## Code Before:
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
## Instruction:
Add databaseinfra get engine name
## Code After:
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
|
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
+
+ def get_databaseinfra_engine_name(self):
+ return self.databaseinfra.engine.engine_type.name
|
5d93d1fb887d76d6fbe0a2f699e973ed9f6e7556
|
tests/test_navigation.py
|
tests/test_navigation.py
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
Delete debug comments and tool
|
Delete debug comments and tool
|
Python
|
agpl-3.0
|
PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
- page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
- # check titles for all sub-toctree content
- # list_url = page.split("/")[3::]
- # new_url = "/".join(list_url)
- # test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
|
Delete debug comments and tool
|
## Code Before:
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
## Instruction:
Delete debug comments and tool
## Code After:
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
|
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
- page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
- # check titles for all sub-toctree content
- # list_url = page.split("/")[3::]
- # new_url = "/".join(list_url)
- # test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
|
ae21001fea38e9b8e4af34654c48b415e419f319
|
core/utils.py
|
core/utils.py
|
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
print split
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
|
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
|
Remove debugging print statement, opps
|
Remove debugging print statement, opps
|
Python
|
bsd-2-clause
|
muhleder/timestrap,muhleder/timestrap,overshard/timestrap,cdubz/timestrap,Leahelisabeth/timestrap,Leahelisabeth/timestrap,Leahelisabeth/timestrap,cdubz/timestrap,Leahelisabeth/timestrap,overshard/timestrap,muhleder/timestrap,cdubz/timestrap,overshard/timestrap
|
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
- print split
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
|
Remove debugging print statement, opps
|
## Code Before:
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
print split
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
## Instruction:
Remove debugging print statement, opps
## Code After:
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
|
from django.utils.duration import _get_duration_components
from datetime import timedelta
def duration_string_from_delta(delta):
seconds = delta.total_seconds()
split = str(seconds/3600).split('.')
- print split
hours = int(split[0])
minutes = int(float('.'+split[1])*60)
string = '{}:{:02d}'.format(hours, minutes)
return string
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if hours is None:
hours = 0
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
raise ValueError('Could not parse duration.')
def duration_string(duration):
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
return string
|
3beffa750d68c2104b740193f0386be464829a1a
|
libpb/__init__.py
|
libpb/__init__.py
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
exit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
Use SystemExit, not exit() to initiate a shutdown.
|
Use SystemExit, not exit() to initiate a shutdown.
exit() has unintented side affects, such as closing stdin, that are
undesired as stdin is assumed to be writable while libpb/event/run
unwinds (i.e. Top monitor).
|
Python
|
bsd-2-clause
|
DragonSA/portbuilder,DragonSA/portbuilder
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
- exit(254)
+ raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
Use SystemExit, not exit() to initiate a shutdown.
|
## Code Before:
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
exit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
## Instruction:
Use SystemExit, not exit() to initiate a shutdown.
## Code After:
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
- exit(254)
+ raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
1f5d52f18df2fba70b53acd681ebb381f532adff
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
# XXX is this the right scope for this? This will remove log/ at the end of
# the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
|
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
|
Document expected behaviour instead of leaving XXX comment
|
Document expected behaviour instead of leaving XXX comment
|
Python
|
agpl-3.0
|
wakermahmud/sync-engine,ErinCall/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,rmasters/inbox,rmasters/inbox,Eagles2F/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,wakermahmud/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,nylas/sync-engine,rmasters/inbox,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,rmasters/inbox
|
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
- # XXX is this the right scope for this? This will remove log/ at the end of
- # the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
+
+ Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
|
Document expected behaviour instead of leaving XXX comment
|
## Code Before:
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
# XXX is this the right scope for this? This will remove log/ at the end of
# the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
## Instruction:
Document expected behaviour instead of leaving XXX comment
## Code After:
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
|
import pytest
import os
from shutil import rmtree
TEST_CONFIG = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config.cfg')
@pytest.fixture(scope='session', autouse=True)
def config():
from inbox.server.config import load_config, config
load_config(filename=TEST_CONFIG)
return config
- # XXX is this the right scope for this? This will remove log/ at the end of
- # the test session.
@pytest.fixture(scope='session')
def log(request, config):
""" Returns root server logger. For others loggers, use this fixture
for setup but then call inbox.server.log.get_logger().
+
+ Testing log directory is removed at the end of the test run!
"""
from inbox.server.log import configure_general_logging
def remove_logs():
rmtree(config['LOGDIR'], ignore_errors=True)
request.addfinalizer(remove_logs)
return configure_general_logging()
|
e73409c17c89ef54f5c7e807059b229517e77617
|
mailchute/smtpd/mailchute.py
|
mailchute/smtpd/mailchute.py
|
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
return (allowed_receiver_domain is None
or recipient_domain == settings.RECEIVER_DOMAIN)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
|
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
recipient_domain = recipient.split('@')[1].lower()
allowed_receiver_domains = settings.RECEIVER_DOMAIN
if allowed_receiver_domains:
allowed_receiver_domains = allowed_receiver_domains.split(',')
return (allowed_receiver_domains is None
or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
|
Handle multiple receiver domain properly
|
Handle multiple receiver domain properly
|
Python
|
bsd-3-clause
|
kevinjqiu/mailchute,kevinjqiu/mailchute
|
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
- allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
+ allowed_receiver_domains = settings.RECEIVER_DOMAIN
+ if allowed_receiver_domains:
+ allowed_receiver_domains = allowed_receiver_domains.split(',')
- return (allowed_receiver_domain is None
+ return (allowed_receiver_domains is None
- or recipient_domain == settings.RECEIVER_DOMAIN)
+ or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
|
Handle multiple receiver domain properly
|
## Code Before:
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
return (allowed_receiver_domain is None
or recipient_domain == settings.RECEIVER_DOMAIN)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
## Instruction:
Handle multiple receiver domain properly
## Code After:
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
recipient_domain = recipient.split('@')[1].lower()
allowed_receiver_domains = settings.RECEIVER_DOMAIN
if allowed_receiver_domains:
allowed_receiver_domains = allowed_receiver_domains.split(',')
return (allowed_receiver_domains is None
or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
|
import datetime
import smtpd
from email.parser import Parser
from mailchute import db
from mailchute import settings
from mailchute.model import RawMessage, IncomingEmail
from logbook import Logger
logger = Logger(__name__)
class MessageProcessor(object):
def _should_persist(self, recipient):
- allowed_receiver_domain = settings.RECEIVER_DOMAIN
recipient_domain = recipient.split('@')[1].lower()
+ allowed_receiver_domains = settings.RECEIVER_DOMAIN
+ if allowed_receiver_domains:
+ allowed_receiver_domains = allowed_receiver_domains.split(',')
- return (allowed_receiver_domain is None
+ return (allowed_receiver_domains is None
? +
- or recipient_domain == settings.RECEIVER_DOMAIN)
+ or recipient_domain in allowed_receiver_domains)
def __call__(self, peer, mailfrom, recipients, data):
try:
mailfrom = mailfrom.lower()
recipients = list(map(str.lower, recipients))
logger.info(
"Incoming message from {0} to {1}".format(mailfrom, recipients))
email = Parser().parsestr(data)
raw_message = RawMessage(message=data)
for recipient in recipients:
if self._should_persist(recipient):
incoming_email = IncomingEmail(
sender=mailfrom, recipient=recipient,
raw_message=raw_message,
subject=email['subject'],
created_at=datetime.datetime.now(),
)
db.session.add(incoming_email)
else:
logger.info('{} is not an allowed recipient. Skip.'.format(
recipient))
db.session.commit()
logger.info("Message saved")
except Exception as e: # pragma: no cover
logger.exception(e)
db.session.rollback()
class MailchuteSMTPServer(smtpd.SMTPServer):
process_message = MessageProcessor()
|
f668f6066864b1efe3863cdb43b8fee4e08a312b
|
test/test_mk_dirs.py
|
test/test_mk_dirs.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
os.rmdir(Launcher.updatedir)
|
Remove Launcher.updatedir after mkdirs test
|
Remove Launcher.updatedir after mkdirs test
Should go into fixture later
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
+ os.rmdir(Launcher.updatedir)
|
Remove Launcher.updatedir after mkdirs test
|
## Code Before:
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
## Instruction:
Remove Launcher.updatedir after mkdirs test
## Code After:
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
os.rmdir(Launcher.updatedir)
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_makevers import create_update_dir
import os
def test_mk_dirs(create_update_dir):
"""Test that ensures that downlaods directory is created properly"""
assert not os.path.isdir(Launcher.updatedir)
launch = Launcher('MUST_HAVE_SOMETHING', 'urlurlurl')
launch._reset_update_files()
assert os.path.isdir(Launcher.updatedir)
+ os.rmdir(Launcher.updatedir)
|
42804d3182b9b7489583250856e31a8daaef5fa3
|
protolint/__init__.py
|
protolint/__init__.py
|
from . import cli
from . import linter
from . import output
__version__ = (1, 0, 0)
|
__version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
|
Fix CLI module during build
|
Fix CLI module during build
|
Python
|
mit
|
sgammon/codeclimate-protobuf,sgammon/codeclimate-protobuf
|
+
+ __version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
- __version__ = (1, 0, 0)
-
|
Fix CLI module during build
|
## Code Before:
from . import cli
from . import linter
from . import output
__version__ = (1, 0, 0)
## Instruction:
Fix CLI module during build
## Code After:
__version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
|
+
+ __version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
-
- __version__ = (1, 0, 0)
|
fea95164d03950f0255b1e6567f36040c67da173
|
gnotty/bots/rss.py
|
gnotty/bots/rss.py
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
Allow overridable message formatting in the RSS bot.
|
Allow overridable message formatting in the RSS bot.
|
Python
|
bsd-2-clause
|
spaceone/gnotty,stephenmcd/gnotty,spaceone/gnotty,stephenmcd/gnotty,spaceone/gnotty,stephenmcd/gnotty
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
- for feed in self.feeds:
+ for feed_url in self.feeds:
+ feed = parse(feed_url)
- for item in parse(feed).entries:
+ for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
+ message = self.format_item_message(feed, item)
- self.message_channel("%(title)s: %(id)s" % item)
+ self.message_channel(message)
return
+ def format_item_message(self, feed, item):
+ item["feed_title"] = feed.feed.title or feed.url
+ return "%(title)s: %(id)s (via %(feed_title)s)" % item
+
+
|
Allow overridable message formatting in the RSS bot.
|
## Code Before:
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
## Instruction:
Allow overridable message formatting in the RSS bot.
## Code After:
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
sends any items to the channel that have not been previously
been parsed.
"""
if parse:
- for feed in self.feeds:
+ for feed_url in self.feeds:
? ++++
+ feed = parse(feed_url)
- for item in parse(feed).entries:
? ------ -
+ for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
+ message = self.format_item_message(feed, item)
- self.message_channel("%(title)s: %(id)s" % item)
? ^^^^^^^ ------------------
+ self.message_channel(message)
? ^^^^^^
return
+
+ def format_item_message(self, feed, item):
+ item["feed_title"] = feed.feed.title or feed.url
+ return "%(title)s: %(id)s (via %(feed_title)s)" % item
+
|
8e7a92bce03ca472bc78bb9df5e2c9cf063c29b7
|
temba/campaigns/tasks.py
|
temba/campaigns/tasks.py
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
try:
push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
Use correct field to get org from
|
Use correct field to get org from
|
Python
|
agpl-3.0
|
harrissoerja/rapidpro,pulilab/rapidpro,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,harrissoerja/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,praekelt/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,reyrodrigues/EU-SMS,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
- for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
+ for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
- push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
+ push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
Use correct field to get org from
|
## Code Before:
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
try:
push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
## Instruction:
Use correct field to get org from
## Code After:
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
try:
push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
from __future__ import unicode_literals
from datetime import datetime
from django.utils import timezone
from djcelery_transactions import task
from redis_cache import get_redis_connection
from .models import Campaign, EventFire
from django.conf import settings
import redis
from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT
from temba.utils.queues import push_task
@task(track_started=True, name='check_campaigns_task') # pragma: no cover
def check_campaigns_task(sched_id=None):
"""
See if any event fires need to be triggered
"""
logger = check_campaigns_task.get_logger()
# get a lock
r = get_redis_connection()
key = 'check_campaigns'
# only do this if we aren't already checking campaigns
if not r.get(key):
with r.lock(key, timeout=3600):
# for each that needs to be fired
- for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'):
? ^^^ ^^^
+ for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'):
? ^^ +++ ^^ +++
try:
- push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
? ^^^
+ push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id))
? ^^ +++
except: # pragma: no cover
logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
|
eccda634d3233cd4f8aaeea372735731fd674c29
|
pysis/labels/__init__.py
|
pysis/labels/__init__.py
|
import io
import functools
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
@functools.wraps(load)
def parse_file_label(stream):
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
@functools.wraps(loads)
def parse_label(data, encoding='utf-8'):
warnings.warn('parse_label is deprecated. use load instead.')
return loads(data, encoding='utf-8')
|
import io
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
def parse_file_label(stream):
load.__doc__ + """
deprecated:: 0.4.0
Use load instead.
"""
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
def parse_label(data, encoding='utf-8'):
loads.__doc__ + """
deprecated:: 0.4.0
Use loads instead.
"""
warnings.warn('parse_label is deprecated. use loads instead.')
return loads(data, encoding='utf-8')
|
Add depreciation messages to old parse_label methods.
|
Add depreciation messages to old parse_label methods.
|
Python
|
bsd-3-clause
|
michaelaye/Pysis,wtolson/pysis,wtolson/pysis,michaelaye/Pysis
|
import io
- import functools
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
- @functools.wraps(load)
def parse_file_label(stream):
+ load.__doc__ + """
+ deprecated:: 0.4.0
+ Use load instead.
+ """
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
- @functools.wraps(loads)
def parse_label(data, encoding='utf-8'):
+ loads.__doc__ + """
+ deprecated:: 0.4.0
+ Use loads instead.
+ """
- warnings.warn('parse_label is deprecated. use load instead.')
+ warnings.warn('parse_label is deprecated. use loads instead.')
return loads(data, encoding='utf-8')
|
Add depreciation messages to old parse_label methods.
|
## Code Before:
import io
import functools
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
@functools.wraps(load)
def parse_file_label(stream):
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
@functools.wraps(loads)
def parse_label(data, encoding='utf-8'):
warnings.warn('parse_label is deprecated. use load instead.')
return loads(data, encoding='utf-8')
## Instruction:
Add depreciation messages to old parse_label methods.
## Code After:
import io
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
def parse_file_label(stream):
load.__doc__ + """
deprecated:: 0.4.0
Use load instead.
"""
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
def parse_label(data, encoding='utf-8'):
loads.__doc__ + """
deprecated:: 0.4.0
Use loads instead.
"""
warnings.warn('parse_label is deprecated. use loads instead.')
return loads(data, encoding='utf-8')
|
import io
- import functools
import warnings
import six
from .decoder import LabelDecoder
from .encoder import LabelEncoder
def load(stream):
"""Parse an isis label from a stream.
:param stream: a ``.read()``-supporting file-like object containing a label.
if ``stream`` is a string it will be treated as a filename
"""
if isinstance(stream, six.string_types):
with open(stream, 'rb') as fp:
return LabelDecoder().decode(fp)
return LabelDecoder().decode(stream)
def loads(data, encoding='utf-8'):
"""Parse an isis label from a string.
:param data: an isis label as a string
:returns: a dictionary representation of the given isis label
"""
if not isinstance(data, bytes):
data = data.encode(encoding)
return LabelDecoder().decode(data)
def dump(label, stream):
LabelEncoder().encode(label, stream)
def dumps(label):
stream = io.BytesIO()
LabelEncoder().encode(label, stream)
return stream.getvalue()
- @functools.wraps(load)
def parse_file_label(stream):
+ load.__doc__ + """
+ deprecated:: 0.4.0
+ Use load instead.
+ """
warnings.warn('parse_file_label is deprecated. use load instead.')
return load(stream)
- @functools.wraps(loads)
def parse_label(data, encoding='utf-8'):
+ loads.__doc__ + """
+ deprecated:: 0.4.0
+ Use loads instead.
+ """
- warnings.warn('parse_label is deprecated. use load instead.')
+ warnings.warn('parse_label is deprecated. use loads instead.')
? +
return loads(data, encoding='utf-8')
|
a8ec60daaee52603a1c3bab879a5eee9f0fd931b
|
ddd/dataobjects/datatype.py
|
ddd/dataobjects/datatype.py
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Python
|
mit
|
toesus/ddd,Sauci/ddd,toesus/ddd,Sauci/ddd,Sauci/ddd
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
- def __init__(self,basetype='',conversion=None,unit='-',constant=False):
+ def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
+ self.bitsize=bitsize
+ self.signed=signed
if not conversion:
- self.conversion=DddConversion(type='binary',fraction=1)
+ self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
+ 'bitsize':self.bitsize,
+ 'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
## Code Before:
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
## Instruction:
Split basetype of DddDatatype into basetype,bitsize,signed
## Code After:
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
- def __init__(self,basetype='',conversion=None,unit='-',constant=False):
+ def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
? +++++++++++++++++++++++
self.basetype=basetype
+ self.bitsize=bitsize
+ self.signed=signed
if not conversion:
- self.conversion=DddConversion(type='binary',fraction=1)
? ^^^^^^ -----------
+ self.conversion=DddConversion(type='1to1')
? ^^^^
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
+ 'bitsize':self.bitsize,
+ 'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
e9df5070abcea31907479630810a64a007ff1f06
|
quotes_page/urls.py
|
quotes_page/urls.py
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
Disable init view for now, add extra url for specific quotes page
|
Disable init view for now, add extra url for specific quotes page
|
Python
|
mit
|
kirberich/qicrawler,kirberich/qicrawler
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
- url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
+ url(r'^$', 'quotes_page.core.views.main', name="main"),
+ url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
- url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
+ #url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
Disable init view for now, add extra url for specific quotes page
|
## Code Before:
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
## Instruction:
Disable init view for now, add extra url for specific quotes page
## Code After:
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
- url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
? -----------------
+ url(r'^$', 'quotes_page.core.views.main', name="main"),
+ url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
- url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
+ #url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
? +
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
082e34ae8d336d2fe93ea428db0b9a72bbfd649e
|
templatetags/stringformat.py
|
templatetags/stringformat.py
|
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
return fmt.format(value)
|
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
if isinstance(value, dict):
return fmt.format(**value)
return fmt.format(value)
|
Add named format support if dict
|
Add named format support if dict
|
Python
|
apache-2.0
|
kensonman/webframe,kensonman/webframe,kensonman/webframe
|
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
+ if isinstance(value, dict):
+ return fmt.format(**value)
return fmt.format(value)
|
Add named format support if dict
|
## Code Before:
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
return fmt.format(value)
## Instruction:
Add named format support if dict
## Code After:
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
if isinstance(value, dict):
return fmt.format(**value)
return fmt.format(value)
|
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
+ if isinstance(value, dict):
+ return fmt.format(**value)
return fmt.format(value)
|
62ebb94f09ea2dee3276041bd471502d57078650
|
mcrouter/test/test_mcrouter_to_mcrouter_tko.py
|
mcrouter/test/test_mcrouter_to_mcrouter_tko.py
|
import re
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
|
import re
import time
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
retries = 10
while self.underlying_mcr.stats()['cmd_delete_count'] != 1 and retries > 0:
retries = retries - 1
time.sleep(1)
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
|
Fix flaky mcrouter tko tests
|
Fix flaky mcrouter tko tests
Summary: As above
Reviewed By: edenzik
Differential Revision: D25722019
fbshipit-source-id: 06ff9200e99f3580db25fef9ca5ab167c50b97ed
|
Python
|
mit
|
facebook/mcrouter,facebook/mcrouter,facebook/mcrouter,facebook/mcrouter
|
import re
+ import time
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
+ retries = 10
+ while self.underlying_mcr.stats()['cmd_delete_count'] != 1 and retries > 0:
+ retries = retries - 1
+ time.sleep(1)
+
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
|
Fix flaky mcrouter tko tests
|
## Code Before:
import re
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
## Instruction:
Fix flaky mcrouter tko tests
## Code After:
import re
import time
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
retries = 10
while self.underlying_mcr.stats()['cmd_delete_count'] != 1 and retries > 0:
retries = retries - 1
time.sleep(1)
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
|
import re
+ import time
from mcrouter.test.McrouterTestCase import McrouterTestCase
class TestMcrouterToMcrouterTko(McrouterTestCase):
config = './mcrouter/test/test_mcrouter_to_mcrouter_tko.json'
extra_args = ['--timeouts-until-tko', '1', '--group-remote-errors']
def setUp(self):
self.underlying_mcr = self.add_mcrouter(self.config,
extra_args=self.extra_args, bg_mcrouter=True)
def get_mcrouter(self):
return self.add_mcrouter(self.config, extra_args=self.extra_args)
def test_underlying_tko(self):
mcr = self.get_mcrouter()
self.assertFalse(mcr.delete("key"))
+ retries = 10
+ while self.underlying_mcr.stats()['cmd_delete_count'] != 1 and retries > 0:
+ retries = retries - 1
+ time.sleep(1)
+
stats = self.underlying_mcr.stats("suspect_servers")
print(stats)
self.assertEqual(1, len(stats))
self.assertTrue(re.match("status:(tko|down)", list(stats.values())[0]))
stats = mcr.stats("suspect_servers")
self.assertEqual(0, len(stats))
|
084eb32734731ee23e33e7360ec9f92e1e533f01
|
__init__.py
|
__init__.py
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
Set better defaults for numpy's print function
|
Set better defaults for numpy's print function
|
Python
|
bsd-3-clause
|
macthecadillac/spinsys
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
+ import shutil
+ import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
+ # set default print options for better display of data on screen
+ term_width = tuple(shutil.get_terminal_size())[0]
+ numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
+
|
Set better defaults for numpy's print function
|
## Code Before:
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
## Instruction:
Set better defaults for numpy's print function
## Code After:
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
+ import shutil
+ import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
+
+ # set default print options for better display of data on screen
+ term_width = tuple(shutil.get_terminal_size())[0]
+ numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
f96d26e8686cb2d1a15860414b90e48418e41f38
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
Purge images and containers before each test
|
tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <[email protected]>
|
Python
|
mit
|
XD-embedded/xd-docker,XD-embedded/xd-docker,esben/xd-docker,esben/xd-docker
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
- @pytest.fixture(scope="module")
+ @pytest.fixture(scope="function")
def docker(request):
+ os.system("for c in `docker ps -a -q`;do docker rm $c;done")
+ os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
Purge images and containers before each test
|
## Code Before:
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
## Instruction:
Purge images and containers before each test
## Code After:
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
- @pytest.fixture(scope="module")
? ^ ^^^^
+ @pytest.fixture(scope="function")
? ^^^^^^ ^
def docker(request):
+ os.system("for c in `docker ps -a -q`;do docker rm $c;done")
+ os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
e2c92e8b6e8fb10addc73986914014b278598470
|
spotpy/examples/spot_setup_standardnormal.py
|
spotpy/examples/spot_setup_standardnormal.py
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
Fix docstring in standardnormal example
|
Fix docstring in standardnormal example
|
Python
|
mit
|
bees4ever/spotpy,bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy,thouska/spotpy,thouska/spotpy
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
- This example implements the Rosenbrock function into SPOT.
+ This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
Fix docstring in standardnormal example
|
## Code Before:
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
## Instruction:
Fix docstring in standardnormal example
## Code After:
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
- This example implements the Rosenbrock function into SPOT.
? ^ ---- ^^^
+ This example implements the Standard Normal function into SPOT.
? ^^^^^^^^^^ ^^^
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction
|
dc251707fdcd9fe021b6f8627ffbb139d42423b3
|
cairis/test/CairisDaemonTestCase.py
|
cairis/test/CairisDaemonTestCase.py
|
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
cmd = os.environ['CAIRIS_SRC'] + "/test/initdb.sh"
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
srcRoot = os.environ['CAIRIS_SRC']
createDbSql = srcRoot + '/test/createdb.sql'
sqlDir = srcRoot + '/sql'
initSql = sqlDir + '/init.sql'
procsSql = sqlDir + '/procs.sql'
cmd = "/usr/bin/mysql --user=root --password='' < " + createDbSql
os.system(cmd)
cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + initSql
os.system(cmd)
cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + procsSql
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
Change database init script for daemon tests
|
Change database init script for daemon tests
|
Python
|
apache-2.0
|
nathanbjenx/cairis,nathanbjenx/cairis,nathanbjenx/cairis,failys/CAIRIS,failys/CAIRIS,failys/CAIRIS,nathanbjenx/cairis
|
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
- cmd = os.environ['CAIRIS_SRC'] + "/test/initdb.sh"
+ srcRoot = os.environ['CAIRIS_SRC']
+ createDbSql = srcRoot + '/test/createdb.sql'
+ sqlDir = srcRoot + '/sql'
+ initSql = sqlDir + '/init.sql'
+ procsSql = sqlDir + '/procs.sql'
+ cmd = "/usr/bin/mysql --user=root --password='' < " + createDbSql
+ os.system(cmd)
+ cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + initSql
+ os.system(cmd)
+ cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + procsSql
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
Change database init script for daemon tests
|
## Code Before:
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
cmd = os.environ['CAIRIS_SRC'] + "/test/initdb.sh"
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
## Instruction:
Change database init script for daemon tests
## Code After:
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
srcRoot = os.environ['CAIRIS_SRC']
createDbSql = srcRoot + '/test/createdb.sql'
sqlDir = srcRoot + '/sql'
initSql = sqlDir + '/init.sql'
procsSql = sqlDir + '/procs.sql'
cmd = "/usr/bin/mysql --user=root --password='' < " + createDbSql
os.system(cmd)
cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + initSql
os.system(cmd)
cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + procsSql
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
from time import sleep
import unittest
import os
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisDaemonTestCase(unittest.TestCase):
- cmd = os.environ['CAIRIS_SRC'] + "/test/initdb.sh"
+ srcRoot = os.environ['CAIRIS_SRC']
+ createDbSql = srcRoot + '/test/createdb.sql'
+ sqlDir = srcRoot + '/sql'
+ initSql = sqlDir + '/init.sql'
+ procsSql = sqlDir + '/procs.sql'
+ cmd = "/usr/bin/mysql --user=root --password='' < " + createDbSql
+ os.system(cmd)
+ cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + initSql
+ os.system(cmd)
+ cmd = "/usr/bin/mysql --user=irisuser --password='' --database=arm < " + procsSql
os.system(cmd)
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
96a08a9c7b11ce96de1c2034efcc19622c4eb419
|
drillion/ship_keys.py
|
drillion/ship_keys.py
|
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
thrust=[key.UP], fire=[key.DOWN])
|
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
fire=[key.K])
|
Change second ship controls to IJKL
|
Change second ship controls to IJKL
|
Python
|
mit
|
elemel/drillion
|
from pyglet.window import key
- PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
+ PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
- thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
+ thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
- PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
+ PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
- thrust=[key.UP], fire=[key.DOWN])
+ fire=[key.K])
|
Change second ship controls to IJKL
|
## Code Before:
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
thrust=[key.UP], fire=[key.DOWN])
## Instruction:
Change second ship controls to IJKL
## Code After:
from pyglet.window import key
PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
thrust=[key.W, key.I], fire=[key.S, key.K])
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
fire=[key.K])
|
from pyglet.window import key
- PLAYER_SHIP_KEYS = dict(left=[key.A, key.LEFT], right=[key.D, key.RIGHT],
? ^^^^ ^^^^^
+ PLAYER_SHIP_KEYS = dict(left=[key.A, key.J], right=[key.D, key.L],
? ^ ^
- thrust=[key.W, key.UP], fire=[key.S, key.DOWN])
? ^^ ^^^^
+ thrust=[key.W, key.I], fire=[key.S, key.K])
? ^ ^
PLAYER_1_SHIP_KEYS = dict(left=[key.A], right=[key.D], thrust=[key.W],
fire=[key.S])
- PLAYER_2_SHIP_KEYS = dict(left=[key.LEFT], right=[key.RIGHT],
? ^^^^ ^^^^^
+ PLAYER_2_SHIP_KEYS = dict(left=[key.J], right=[key.L], thrust=[key.I],
? ^ ^ ++++++++++++++++
- thrust=[key.UP], fire=[key.DOWN])
? ----------------- ^^^^
+ fire=[key.K])
? ^
|
cbdfc1b1cb4162256538576cabe2b6832aa83bca
|
django_mysqlpool/__init__.py
|
django_mysqlpool/__init__.py
|
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
Fix circular import when used with other add-ons that import django.db
|
Fix circular import when used with other add-ons that import django.db
eg sorl_thumbnail:
Traceback (most recent call last):
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/bin/manage", line 40, in <module>
sys.exit(manage.main())
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/retrans/manage.py", line 15, in main
execute_manager(settings)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 438, in execute_manager
utility.execute()
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 379, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 191, in run_from_argv
self.execute(*args, **options.__dict__)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 209, in execute
translation.activate('en-us')
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/__init__.py", line 100, in activate
return _trans.activate(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 202, in activate
_active.value = translation(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 185, in translation
default_translation = _fetch(settings.LANGUAGE_CODE)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 162, in _fetch
app = import_module(appname)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/importlib.py", line 35, in import_module
__import__(name)
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/__init__.py", line 1, in <module>
from sorl.thumbnail.fields import ImageField
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/fields.py", line 2, in <module>
from django.db import models
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/__init__.py", line 78, in <module>
connection = connections[DEFAULT_DB_ALIAS]
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 94, in __getitem__
backend = load_backend(db['ENGINE'])
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 47, in load_backend
if backend_name not in available_backends:
django.core.exceptions.ImproperlyConfigured: 'django_mysqlpool.backends.mysqlpool' isn't an available database backend.
Try using django.db.backends.XXX, where XXX is one of:
'dummy', 'mysql', 'oracle', 'postgresql', 'postgresql_psycopg2', 'sqlite3'
Error was: cannot import name connection
|
Python
|
mit
|
smartfile/django-mysqlpool
|
from functools import wraps
- from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
+ from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
Fix circular import when used with other add-ons that import django.db
|
## Code Before:
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
## Instruction:
Fix circular import when used with other add-ons that import django.db
## Code After:
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
from functools import wraps
- from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
+ from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
667294dcc3b8ab34618ad674c2b6ac8efeec0620
|
places/admin.py
|
places/admin.py
|
from django.contrib.gis import admin
from models import Place
admin.site.register(Place, admin.OSMGeoAdmin)
|
from django.contrib.gis import admin
from models import Place
try:
_model_admin = admin.OSMGeoAdmin
except AttributeError:
_model_admin = admin.ModelAdmin
admin.site.register(Place, _model_admin)
|
Make it possible to run dev server on my desktop.
|
Make it possible to run dev server on my desktop.
While I'm accessing a suitable database remotely, I don't have enough
stuff installed locally to have OSMGeoAdmin (no GDAL installed, for
example).
|
Python
|
bsd-3-clause
|
MAPC/masshealth,MAPC/masshealth
|
from django.contrib.gis import admin
from models import Place
- admin.site.register(Place, admin.OSMGeoAdmin)
+ try:
+ _model_admin = admin.OSMGeoAdmin
+ except AttributeError:
+ _model_admin = admin.ModelAdmin
+ admin.site.register(Place, _model_admin)
+
|
Make it possible to run dev server on my desktop.
|
## Code Before:
from django.contrib.gis import admin
from models import Place
admin.site.register(Place, admin.OSMGeoAdmin)
## Instruction:
Make it possible to run dev server on my desktop.
## Code After:
from django.contrib.gis import admin
from models import Place
try:
_model_admin = admin.OSMGeoAdmin
except AttributeError:
_model_admin = admin.ModelAdmin
admin.site.register(Place, _model_admin)
|
from django.contrib.gis import admin
from models import Place
+ try:
+ _model_admin = admin.OSMGeoAdmin
+ except AttributeError:
+ _model_admin = admin.ModelAdmin
+
- admin.site.register(Place, admin.OSMGeoAdmin)
? ------------
+ admin.site.register(Place, _model_admin)
? +++++++
|
6fa5c20f4d3b6ea9716adbf4c5fd50739f2f987e
|
protractor/test.py
|
protractor/test.py
|
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().iteritems():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
Add hook for protactor params
|
Add hook for protactor params
|
Python
|
mit
|
jpulec/django-protractor,penguin359/django-protractor
|
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
+ def get_protractor_params(self):
+ """A hook for adding params that protractor will receive."""
+ return {
+ 'live_server_url': self.live_server_url
+ }
+
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
- protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
+ for key, value in self.get_protractor_params().iteritems():
+ protractor_command += ' --params.{key}={value}'.format(
+ key=key, value=value
+ )
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
Add hook for protactor params
|
## Code Before:
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
## Instruction:
Add hook for protactor params
## Code After:
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().iteritems():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
+ def get_protractor_params(self):
+ """A hook for adding params that protractor will receive."""
+ return {
+ 'live_server_url': self.live_server_url
+ }
+
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
- protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
+ for key, value in self.get_protractor_params().iteritems():
+ protractor_command += ' --params.{key}={value}'.format(
+ key=key, value=value
+ )
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
aa436864f53a4c77b4869baabfb1478d7fea36f0
|
tests/products/__init__.py
|
tests/products/__init__.py
|
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
Allow str type comparison in py2/3
|
Allow str type comparison in py2/3
|
Python
|
bsd-3-clause
|
ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tiles,ceholden/tilezilla,ceholden/landsat_tile
|
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
- 'timeseries_id': str,
+ 'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
- 'platform': str,
+ 'platform': six.string_types,
- 'instrument': str,
+ 'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
- if isinstance(_type, type):
+ if isinstance(_type, (type, tuple)):
+ # Type declaration one or more types
assert isinstance(value, _type)
else:
+ # Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
Allow str type comparison in py2/3
|
## Code Before:
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
## Instruction:
Allow str type comparison in py2/3
## Code After:
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
- 'timeseries_id': str,
+ 'timeseries_id': six.string_types,
? ++++ +++++++++
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
- 'platform': str,
+ 'platform': six.string_types,
? ++++ +++++++++
- 'instrument': str,
+ 'instrument': six.string_types,
? ++++ +++++++++
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
- if isinstance(_type, type):
+ if isinstance(_type, (type, tuple)):
? + ++++++++
+ # Type declaration one or more types
assert isinstance(value, _type)
else:
+ # Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
3be0c6c18a61f35ae5804464cc0da867fd0065f5
|
tests/test_ez_setup.py
|
tests/test_ez_setup.py
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import (use_setuptools, _python_cmd, _install)
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
def test_use_setuptools(self):
self.assertEqual(use_setuptools(), None)
if __name__ == '__main__':
unittest.main()
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
if __name__ == '__main__':
unittest.main()
|
Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present.
|
Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
- from ez_setup import (use_setuptools, _python_cmd, _install)
+ from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
- def test_use_setuptools(self):
- self.assertEqual(use_setuptools(), None)
-
if __name__ == '__main__':
unittest.main()
|
Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present.
|
## Code Before:
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import (use_setuptools, _python_cmd, _install)
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
def test_use_setuptools(self):
self.assertEqual(use_setuptools(), None)
if __name__ == '__main__':
unittest.main()
## Instruction:
Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present.
## Code After:
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
if __name__ == '__main__':
unittest.main()
|
import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
- from ez_setup import (use_setuptools, _python_cmd, _install)
? ----------------- -
+ from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
- def test_use_setuptools(self):
- self.assertEqual(use_setuptools(), None)
-
if __name__ == '__main__':
unittest.main()
|
c64682fe6204b56bd5282c46a7c7168a55b46a86
|
spicedham/__init__.py
|
spicedham/__init__.py
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
Allow for the case where no plugin returns a score
|
Allow for the case where no plugin returns a score
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
+ if total > 0:
- return average_score / total
+ return average_score / total
+ else:
+ return 0
|
Allow for the case where no plugin returns a score
|
## Code Before:
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
## Instruction:
Allow for the case where no plugin returns a score
## Code After:
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
    """Feed one labelled sample to every registered classifier plugin.

    training_data: the raw message/features to learn from.
    is_spam: label for the sample, passed through to each plugin's
        train() method unchanged.
    """
    for plugin in plugins:
        plugin.train(training_data, is_spam)
def classify(classification_data):
    """Return the mean score over all plugins that produced one.

    Each plugin's ``classify`` may return a numeric score or ``None``
    (meaning "no opinion"); ``None`` results are excluded from the
    average.  When no plugin returns a score, 0 is returned so the
    caller never hits a division by zero.
    """
    # `is not None` rather than `!= None` (PEP 8): 0 and 0.0 are valid
    # scores and must be counted, and identity comparison is the
    # canonical None test.
    scores = [
        score
        for score in (plugin.classify(classification_data)
                      for plugin in plugins)
        if score is not None
    ]
    if not scores:
        # No plugin had an opinion.
        return 0
    return sum(scores) / len(scores)
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
+ if total > 0:
- return average_score / total
+ return average_score / total
? ++++
+ else:
+ return 0
|
ff8aa2725001dbd1281357ccd5e0877257b5975d
|
hackernews_scrapy/items.py
|
hackernews_scrapy/items.py
|
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
|
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
|
Add crawled_at field to HackernewsScrapyItem
|
Add crawled_at field to HackernewsScrapyItem
|
Python
|
mit
|
mdsrosa/hackernews_scrapy
|
import scrapy
class HackernewsScrapyItem(scrapy.Item):
- title = scrapy.Field()
+ title = scrapy.Field()
+ crawled_at = scrapy.Field()
|
Add crawled_at field to HackernewsScrapyItem
|
## Code Before:
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
## Instruction:
Add crawled_at field to HackernewsScrapyItem
## Code After:
import scrapy
class HackernewsScrapyItem(scrapy.Item):
    """A scraped Hacker News story.

    Fields:
        title: story headline text.
        crawled_at: timestamp recorded at scrape time.
    """
    title = scrapy.Field()
    crawled_at = scrapy.Field()
|
import scrapy
class HackernewsScrapyItem(scrapy.Item):
- title = scrapy.Field()
? ^
+ title = scrapy.Field()
? ^^^^
+ crawled_at = scrapy.Field()
|
17b9749f2a36499c74effc27ec442a4bb957e877
|
typescript/commands/build.py
|
typescript/commands/build.py
|
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
if get_node_path() is None:
print("Cannot found node. Build cancelled.")
return
file_name = self.window.active_view().file_name()
project_info = cli.service.project_info(file_name)
if project_info["success"]:
if "configFileName" in project_info["body"]:
tsconfig_dir = dirname(project_info["body"]["configFileName"])
self.window.run_command("exec", {
"cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
cmd = [get_node_path(), TSC_PATH, file_name]
if params != "":
cmd.extend(params.split(' '))
self.window.run_command("exec", {
"cmd": cmd,
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
|
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
if get_node_path() is None:
print("Cannot found node. Build cancelled.")
return
file_name = self.window.active_view().file_name()
project_info = cli.service.project_info(file_name)
if project_info["success"]:
if "configFileName" in project_info["body"]:
tsconfig_dir = dirname(project_info["body"]["configFileName"])
self.window.run_command("exec", {
"cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
cmd = [get_node_path(), TSC_PATH, file_name]
print(cmd)
if params != "":
cmd.extend(params.split(' '))
self.window.run_command("exec", {
"cmd": cmd,
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
})
|
Remove shell requirement to avoid escaping
|
Remove shell requirement to avoid escaping
|
Python
|
apache-2.0
|
fongandrew/TypeScript-Sublime-JSX-Plugin,Microsoft/TypeScript-Sublime-Plugin,fongandrew/TypeScript-Sublime-JSX-Plugin,zhengbli/TypeScript-Sublime-Plugin,kungfusheep/TypeScript-Sublime-Plugin,RyanCavanaugh/TypeScript-Sublime-Plugin,zhengbli/TypeScript-Sublime-Plugin,kungfusheep/TypeScript-Sublime-Plugin,RyanCavanaugh/TypeScript-Sublime-Plugin,fongandrew/TypeScript-Sublime-JSX-Plugin,Microsoft/TypeScript-Sublime-Plugin,Microsoft/TypeScript-Sublime-Plugin,zhengbli/TypeScript-Sublime-Plugin,kungfusheep/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,RyanCavanaugh/TypeScript-Sublime-Plugin
|
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
if get_node_path() is None:
print("Cannot found node. Build cancelled.")
return
file_name = self.window.active_view().file_name()
project_info = cli.service.project_info(file_name)
if project_info["success"]:
if "configFileName" in project_info["body"]:
tsconfig_dir = dirname(project_info["body"]["configFileName"])
self.window.run_command("exec", {
"cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
- "shell": True
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
cmd = [get_node_path(), TSC_PATH, file_name]
+ print(cmd)
if params != "":
cmd.extend(params.split(' '))
self.window.run_command("exec", {
"cmd": cmd,
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
- "shell": True
})
|
Remove shell requirement to avoid escaping
|
## Code Before:
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
if get_node_path() is None:
print("Cannot found node. Build cancelled.")
return
file_name = self.window.active_view().file_name()
project_info = cli.service.project_info(file_name)
if project_info["success"]:
if "configFileName" in project_info["body"]:
tsconfig_dir = dirname(project_info["body"]["configFileName"])
self.window.run_command("exec", {
"cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
cmd = [get_node_path(), TSC_PATH, file_name]
if params != "":
cmd.extend(params.split(' '))
self.window.run_command("exec", {
"cmd": cmd,
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
## Instruction:
Remove shell requirement to avoid escaping
## Code After:
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
    """Window command that compiles the current TypeScript project with tsc."""

    def run(self):
        # Bail out early if the node executable cannot be located.
        if get_node_path() is None:
            print("Cannot found node. Build cancelled.")
            return

        file_name = self.window.active_view().file_name()
        project_info = cli.service.project_info(file_name)
        if project_info["success"]:
            if "configFileName" in project_info["body"]:
                # tsconfig-based project: compile the whole project
                # directory with `tsc -p`.  file_regex lets Sublime's
                # exec panel jump to file(line,col) in tsc diagnostics.
                tsconfig_dir = dirname(project_info["body"]["configFileName"])
                self.window.run_command("exec", {
                    "cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
                    "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
                })
            else:
                # Inferred project (no tsconfig): prompt the user for
                # extra compiler parameters before building.
                sublime.active_window().show_input_panel(
                    "Build parameters: ",
                    "", # initial text
                    self.compile_inferred_project,
                    None, # on change
                    None # on cancel
                )

    def compile_inferred_project(self, params=""):
        """Compile the active file directly, appending any user params."""
        file_name = self.window.active_view().file_name()
        cmd = [get_node_path(), TSC_PATH, file_name]
        print(cmd)  # NOTE(review): looks like leftover debug output -- confirm
        if params != "":
            cmd.extend(params.split(' '))
        self.window.run_command("exec", {
            "cmd": cmd,
            "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
        })
|
import sublime_plugin
from ..libs.global_vars import *
from ..libs import cli
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
if get_node_path() is None:
print("Cannot found node. Build cancelled.")
return
file_name = self.window.active_view().file_name()
project_info = cli.service.project_info(file_name)
if project_info["success"]:
if "configFileName" in project_info["body"]:
tsconfig_dir = dirname(project_info["body"]["configFileName"])
self.window.run_command("exec", {
"cmd": [get_node_path(), TSC_PATH, "-p", tsconfig_dir],
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
? -
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
- "shell": True
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
cmd = [get_node_path(), TSC_PATH, file_name]
+ print(cmd)
if params != "":
cmd.extend(params.split(' '))
self.window.run_command("exec", {
"cmd": cmd,
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
? -
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
- "shell": True
})
|
2168557dc088be1b991f7eb42dabac144e3add3b
|
src/ggrc/models/event.py
|
src/ggrc/models/event.py
|
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
__tablename__ = 'events'
action = db.Column(
db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
nullable=False,
)
resource_id = db.Column(db.Integer)
resource_type = db.Column(db.String)
revisions = db.relationship(
'Revision',
backref='event',
cascade='all, delete-orphan',
)
_publish_attrs = [
'action',
'resource_id',
'resource_type',
'revisions',
]
_include_links = [
'revisions',
]
@staticmethod
def _extra_table_args(class_):
return (
db.Index('events_modified_by', 'modified_by_id'),
db.Index(
'ix_{}_updated_at'.format(class_.__tablename__),
'updated_at',
),
)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Event, cls).eager_query()
return query.options(
orm.subqueryload('revisions').undefer_group('Revision_complete'),
)
|
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
__tablename__ = 'events'
action = db.Column(
db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
nullable=False,
)
resource_id = db.Column(db.Integer)
resource_type = db.Column(db.String)
revisions = db.relationship(
'Revision',
backref='event',
cascade='all, delete-orphan',
)
_publish_attrs = [
'action',
'resource_id',
'resource_type',
'revisions',
]
_include_links = [
'revisions',
]
@staticmethod
def _extra_table_args(class_):
return (
db.Index('events_modified_by', 'modified_by_id'),
)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Event, cls).eager_query()
return query.options(
orm.subqueryload('revisions').undefer_group('Revision_complete'),
)
|
Remove redundant index declaration from Event
|
Remove redundant index declaration from Event
The updated at index is already declared in ChangeTracked mixin which is
included in the Base mixin.
|
Python
|
apache-2.0
|
plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core
|
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
__tablename__ = 'events'
action = db.Column(
db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
nullable=False,
)
resource_id = db.Column(db.Integer)
resource_type = db.Column(db.String)
revisions = db.relationship(
'Revision',
backref='event',
cascade='all, delete-orphan',
)
_publish_attrs = [
'action',
'resource_id',
'resource_type',
'revisions',
]
_include_links = [
'revisions',
]
@staticmethod
def _extra_table_args(class_):
return (
db.Index('events_modified_by', 'modified_by_id'),
- db.Index(
- 'ix_{}_updated_at'.format(class_.__tablename__),
- 'updated_at',
- ),
)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Event, cls).eager_query()
return query.options(
orm.subqueryload('revisions').undefer_group('Revision_complete'),
)
|
Remove redundant index declaration from Event
|
## Code Before:
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
__tablename__ = 'events'
action = db.Column(
db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
nullable=False,
)
resource_id = db.Column(db.Integer)
resource_type = db.Column(db.String)
revisions = db.relationship(
'Revision',
backref='event',
cascade='all, delete-orphan',
)
_publish_attrs = [
'action',
'resource_id',
'resource_type',
'revisions',
]
_include_links = [
'revisions',
]
@staticmethod
def _extra_table_args(class_):
return (
db.Index('events_modified_by', 'modified_by_id'),
db.Index(
'ix_{}_updated_at'.format(class_.__tablename__),
'updated_at',
),
)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Event, cls).eager_query()
return query.options(
orm.subqueryload('revisions').undefer_group('Revision_complete'),
)
## Instruction:
Remove redundant index declaration from Event
## Code After:
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
    """Audit-log entry recording one API action and the revisions it made.

    Each Event captures the action performed on a resource and owns the
    Revision rows produced by that action (removed with the event via
    the delete-orphan cascade).
    """

    __tablename__ = 'events'

    # What happened; GET is included so read access can be logged too.
    action = db.Column(
        db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
        nullable=False,
    )
    # Loose reference to the affected resource: type name plus id, with
    # no foreign key, since the resource may live in any table.
    resource_id = db.Column(db.Integer)
    resource_type = db.Column(db.String)

    revisions = db.relationship(
        'Revision',
        backref='event',
        cascade='all, delete-orphan',
    )

    # Attributes exposed through the JSON publishing layer.
    _publish_attrs = [
        'action',
        'resource_id',
        'resource_type',
        'revisions',
    ]
    _include_links = [
        'revisions',
    ]

    @staticmethod
    def _extra_table_args(class_):
        # Only the modified_by index is declared here; the updated_at
        # index comes from a mixin (see commit message: declared in
        # ChangeTracked, included via Base).
        return (
            db.Index('events_modified_by', 'modified_by_id'),
        )

    @classmethod
    def eager_query(cls):
        # Load revisions (including deferred column group) in a single
        # extra query to avoid N+1 loads when serializing events.
        from sqlalchemy import orm
        query = super(Event, cls).eager_query()
        return query.options(
            orm.subqueryload('revisions').undefer_group('Revision_complete'),
        )
|
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
__tablename__ = 'events'
action = db.Column(
db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
nullable=False,
)
resource_id = db.Column(db.Integer)
resource_type = db.Column(db.String)
revisions = db.relationship(
'Revision',
backref='event',
cascade='all, delete-orphan',
)
_publish_attrs = [
'action',
'resource_id',
'resource_type',
'revisions',
]
_include_links = [
'revisions',
]
@staticmethod
def _extra_table_args(class_):
return (
db.Index('events_modified_by', 'modified_by_id'),
- db.Index(
- 'ix_{}_updated_at'.format(class_.__tablename__),
- 'updated_at',
- ),
)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Event, cls).eager_query()
return query.options(
orm.subqueryload('revisions').undefer_group('Revision_complete'),
)
|
5f6fb5866ca74793b05308ac27c4698033068cfe
|
tvtk/tests/test_garbage_collection.py
|
tvtk/tests/test_garbage_collection.py
|
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
""" See: tvtk.tests.common.TestGarbageCollection
"""
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene(self):
""" Tests if Scene can be garbage collected."""
def obj_fn():
return Scene()
def close_fn(o):
o.closed = True
self.check_object_garbage_collected(obj_fn, close_fn)
def test_scene_model(self):
""" Tests if SceneModel can be garbage collected."""
def create_fn():
return SceneModel()
def close_fn(obj):
obj.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
|
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.tvtk_scene import TVTKScene
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
""" See: tvtk.tests.common.TestGarbageCollection
"""
def test_tvtk_scene(self):
""" Tests if TVTK scene can be garbage collected."""
def create_fn():
return TVTKScene()
def close_fn(o):
o.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene(self):
""" Tests if Scene can be garbage collected."""
def create_fn():
return Scene()
def close_fn(o):
o.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
def test_scene_model(self):
""" Tests if SceneModel can be garbage collected."""
def create_fn():
return SceneModel()
def close_fn(obj):
obj.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
|
Test TVTKScene garbage collection and renaming
|
Test TVTKScene garbage collection and renaming
|
Python
|
bsd-3-clause
|
alexandreleroux/mayavi,alexandreleroux/mayavi,liulion/mayavi,liulion/mayavi,dmsurti/mayavi,dmsurti/mayavi
|
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
+ from tvtk.pyface.tvtk_scene import TVTKScene
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
""" See: tvtk.tests.common.TestGarbageCollection
"""
+ def test_tvtk_scene(self):
+ """ Tests if TVTK scene can be garbage collected."""
+ def create_fn():
+ return TVTKScene()
+
+ def close_fn(o):
+ o.closed = True
+
+ self.check_object_garbage_collected(create_fn, close_fn)
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene(self):
""" Tests if Scene can be garbage collected."""
- def obj_fn():
+ def create_fn():
return Scene()
def close_fn(o):
o.closed = True
- self.check_object_garbage_collected(obj_fn, close_fn)
+ self.check_object_garbage_collected(create_fn, close_fn)
def test_scene_model(self):
""" Tests if SceneModel can be garbage collected."""
def create_fn():
return SceneModel()
def close_fn(obj):
obj.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
|
Test TVTKScene garbage collection and renaming
|
## Code Before:
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
""" See: tvtk.tests.common.TestGarbageCollection
"""
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene(self):
""" Tests if Scene can be garbage collected."""
def obj_fn():
return Scene()
def close_fn(o):
o.closed = True
self.check_object_garbage_collected(obj_fn, close_fn)
def test_scene_model(self):
""" Tests if SceneModel can be garbage collected."""
def create_fn():
return SceneModel()
def close_fn(obj):
obj.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
## Instruction:
Test TVTKScene garbage collection and renaming
## Code After:
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
from tvtk.pyface.tvtk_scene import TVTKScene
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
    """ See: tvtk.tests.common.TestGarbageCollection
    """

    def test_tvtk_scene(self):
        """ Tests if TVTK scene can be garbage collected."""
        def create_fn():
            return TVTKScene()

        def close_fn(o):
            # Closing the scene releases its resources so the object
            # can actually be collected.
            o.closed = True

        self.check_object_garbage_collected(create_fn, close_fn)

    @unittest.skipIf(
        ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
    def test_scene(self):
        """ Tests if Scene can be garbage collected."""
        def create_fn():
            return Scene()

        def close_fn(o):
            o.closed = True

        self.check_object_garbage_collected(create_fn, close_fn)

    def test_scene_model(self):
        """ Tests if SceneModel can be garbage collected."""
        def create_fn():
            return SceneModel()

        def close_fn(obj):
            obj.closed = True

        self.check_object_garbage_collected(create_fn, close_fn)
|
# Authors: Deepak Surti, Ioannis Tziakos
# Copyright (c) 2015, Enthought, Inc.
# License: BSD Style.
import unittest
from traits.etsconfig.api import ETSConfig
+ from tvtk.pyface.tvtk_scene import TVTKScene
from tvtk.pyface.scene import Scene
from tvtk.pyface.scene_model import SceneModel
from tvtk.tests.common import TestGarbageCollection
class TestTVTKGarbageCollection(TestGarbageCollection):
""" See: tvtk.tests.common.TestGarbageCollection
"""
+ def test_tvtk_scene(self):
+ """ Tests if TVTK scene can be garbage collected."""
+ def create_fn():
+ return TVTKScene()
+
+ def close_fn(o):
+ o.closed = True
+
+ self.check_object_garbage_collected(create_fn, close_fn)
@unittest.skipIf(
ETSConfig.toolkit=='wx', 'Test segfaults using WX (issue #216)')
def test_scene(self):
""" Tests if Scene can be garbage collected."""
- def obj_fn():
? ^^^
+ def create_fn():
? ^^^^^^
return Scene()
def close_fn(o):
o.closed = True
- self.check_object_garbage_collected(obj_fn, close_fn)
? ^^^
+ self.check_object_garbage_collected(create_fn, close_fn)
? ^^^^^^
def test_scene_model(self):
""" Tests if SceneModel can be garbage collected."""
def create_fn():
return SceneModel()
def close_fn(obj):
obj.closed = True
self.check_object_garbage_collected(create_fn, close_fn)
|
182b94f777b1743671b706c939ce14f89c31efca
|
lint/queue.py
|
lint/queue.py
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
|
Remove MIN_DELAY bc a default setting is guaranteed
|
Remove MIN_DELAY bc a default setting is guaranteed
|
Python
|
mit
|
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
- MIN_DELAY = 0.1
-
-
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
- return persist.settings.get('delay', MIN_DELAY)
+ return persist.settings.get('delay')
queue = Daemon()
|
Remove MIN_DELAY bc a default setting is guaranteed
|
## Code Before:
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
## Instruction:
Remove MIN_DELAY bc a default setting is guaranteed
## Code After:
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
    """Debouncing facade that schedules lint callbacks per view.

    Kept as a class only for API compatibility with SublimeLinter 3;
    the actual scheduling lives in the module-level queue_lint().
    """

    def start(self, callback):
        # Remember the function to invoke when a queued lint fires.
        self._callback = callback

    def hit(self, view):
        """(Re)schedule a lint for *view*; returns the hit timestamp."""
        assert self._callback, "Queue: Can't hit before start."
        vid = view.id()
        delay = get_delay()  # [seconds]
        return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
    """Schedule ``callback(vid, hit_time)`` to run after *delay* seconds.

    Any timer already pending for the same view id is cancelled first,
    so rapid successive hits collapse into one trailing callback
    (debouncing).  Returns the monotonic timestamp of this hit so the
    callback can tell whether it has been superseded.
    """
    hit_time = time.monotonic()

    def worker():
        callback(vid, hit_time)

    try:
        timers[vid].cancel()
    except KeyError:
        # First hit for this view: nothing to cancel.
        pass

    timers[vid] = timer = threading.Timer(delay, worker)
    timer.start()
    return hit_time
def get_delay():
    """Return the delay (seconds) between a lint request and its processing.

    If the lint mode is not background, there is no delay.  Otherwise the
    user's "delay" setting is returned; a default value is guaranteed by
    the settings layer, so no fallback is needed here.
    """
    if persist.settings.get('lint_mode') != 'background':
        return 0

    return persist.settings.get('delay')
queue = Daemon()
|
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
- MIN_DELAY = 0.1
-
-
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
- return persist.settings.get('delay', MIN_DELAY)
? -----------
+ return persist.settings.get('delay')
queue = Daemon()
|
04cca2c87cc8e56ecd84e1b3125a7a7b8c67b026
|
norc_utils/backup.py
|
norc_utils/backup.py
|
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
Python
|
bsd-3-clause
|
darrellsilver/norc,darrellsilver/norc
|
import os
- from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
+ from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
- AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
+ from norc.settings import (AWS_ACCESS_KEY_ID,
+ AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
## Code Before:
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
## Instruction:
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
## Code After:
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
import os
- from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
? - -
+ from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
- AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
+ from norc.settings import (AWS_ACCESS_KEY_ID,
+ AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
09195f50e328d3aee4cc60f0702d8605ea520eb3
|
tests/sentry/utils/models/tests.py
|
tests/sentry/utils/models/tests.py
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
Add missing assertion in test
|
Python
|
bsd-3-clause
|
NickPresta/sentry,jokey2k/sentry,1tush/sentry,zenefits/sentry,SilentCircle/sentry,wujuguang/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,Natim/sentry,NickPresta/sentry,BuildingLink/sentry,rdio/sentry,BuildingLink/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,JackDanger/sentry,SilentCircle/sentry,ifduyue/sentry,beeftornado/sentry,hongliang5623/sentry,nicholasserra/sentry,NickPresta/sentry,camilonova/sentry,fotinakis/sentry,jokey2k/sentry,fuziontech/sentry,BuildingLink/sentry,korealerts1/sentry,looker/sentry,wong2/sentry,ifduyue/sentry,BayanGroup/sentry,gencer/sentry,beni55/sentry,mvaled/sentry,SilentCircle/sentry,wujuguang/sentry,imankulov/sentry,jean/sentry,JTCunning/sentry,1tush/sentry,looker/sentry,songyi199111/sentry,zenefits/sentry,jean/sentry,1tush/sentry,ngonzalvez/sentry,TedaLIEz/sentry,daevaorn/sentry,drcapulet/sentry,NickPresta/sentry,BayanGroup/sentry,BuildingLink/sentry,JackDanger/sentry,camilonova/sentry,vperron/sentry,beeftornado/sentry,gencer/sentry,kevinastone/sentry,nicholasserra/sentry,mitsuhiko/sentry,Natim/sentry,looker/sentry,mvaled/sentry,camilonova/sentry,ewdurbin/sentry,korealerts1/sentry,songyi199111/sentry,Kryz/sentry,alexm92/sentry,Natim/sentry,daevaorn/sentry,argonemyth/sentry,ifduyue/sentry,rdio/sentry,kevinastone/sentry,gencer/sentry,jean/sentry,daevaorn/sentry,argonemyth/sentry,ewdurbin/sentry,imankulov/sentry,wujuguang/sentry,fotinakis/sentry,argonemyth/sentry,imankulov/sentry,TedaLIEz/sentry,vperron/sentry,drcapulet/sentry,songyi199111/sentry,BayanGroup/sentry,daevaorn/sentry,kevinlondon/sentry,gg7/sentry,beni55/sentry,gencer/sentry,vperron/sentry,rdio/sentry,pauloschilling/sentry,mvaled/sentry,mvaled/sentry,llonchj/sentry,boneyao/sentry,Kryz/sentry,gg7/sentry,BuildingLink/sentry,jean/sentry,looker/sentry,pauloschilling/sentry,jean/sentry,mitsuhiko/sentry,zenefits/sentry,zenefits/sentry,alexm92/sentry,pauloschilling/sentry,alexm92/sentry,hongliang5623/sentry,mvaled/sentry,wong2/sentry,JTCunning/sentry,beeftornado/sentr
y,beni55/sentry,JamesMura/sentry,jokey2k/sentry,fuziontech/sentry,korealerts1/sentry,JamesMura/sentry,JTCunning/sentry,llonchj/sentry,JamesMura/sentry,JackDanger/sentry,drcapulet/sentry,fuziontech/sentry,kevinlondon/sentry,gg7/sentry,zenefits/sentry,boneyao/sentry,felixbuenemann/sentry,SilentCircle/sentry,kevinastone/sentry,TedaLIEz/sentry,looker/sentry,rdio/sentry,felixbuenemann/sentry,boneyao/sentry,ewdurbin/sentry,gencer/sentry,ngonzalvez/sentry,ifduyue/sentry,felixbuenemann/sentry,fotinakis/sentry,fotinakis/sentry,nicholasserra/sentry,hongliang5623/sentry,llonchj/sentry,wong2/sentry,kevinlondon/sentry
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
+ self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
## Code Before:
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
## Instruction:
Add missing assertion in test
## Code After:
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
+ self.assertFalse(inst.has_changed('foo'))
|
0261a0f9a1dde9f9f6167e3630561219e3dca124
|
statsmodels/datasets/__init__.py
|
statsmodels/datasets/__init__.py
|
#__all__ = filter(lambda s:not s.startswith('_'),dir())
import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \
macrodata, nile, randhie, scotland, spector, stackloss, star98, \
strikes, sunspots, fair, heart, statecrime
|
#__all__ = filter(lambda s:not s.startswith('_'),dir())
from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino,
grunfeld, longley, macrodata, nile, randhie, scotland, spector,
stackloss, star98, strikes, sunspots, fair, heart, statecrime)
|
Switch to relative imports and fix pep-8
|
STY: Switch to relative imports and fix pep-8
|
Python
|
bsd-3-clause
|
bsipocz/statsmodels,bsipocz/statsmodels,bsipocz/statsmodels,hlin117/statsmodels,bashtage/statsmodels,nguyentu1602/statsmodels,hlin117/statsmodels,musically-ut/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wwf5067/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,bert9bert/statsmodels,astocko/statsmodels,jseabold/statsmodels,YihaoLu/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,astocko/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,phobson/statsmodels,rgommers/statsmodels,nguyentu1602/statsmodels,josef-pkt/statsmodels,gef756/statsmodels,Averroes/statsmodels,phobson/statsmodels,statsmodels/statsmodels,yl565/statsmodels,statsmodels/statsmodels,adammenges/statsmodels,wdurhamh/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,adammenges/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,rgommers/statsmodels,wwf5067/statsmodels,waynenilsen/statsmodels,huongttlan/statsmodels,bavardage/statsmodels,yarikoptic/pystatsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,hainm/statsmodels,bashtage/statsmodels,bzero/statsmodels,wzbozon/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,musically-ut/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,gef756/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,saketkc/statsmodels,josef-pkt/statsmodels,detrout/debian-statsmodels,astocko/statsmodels,wzbozon/statsmodels,yl565/statsmodels,adammenges/statsmodels,hlin117/statsmodels,detrout/debian-statsmodels,bzero/statsmodels,kiyoto/statsmodels,yl565/statsmodels,alekz112/statsmodels,bavardage/statsmodels,nvoron23/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,hainm/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,wkfwkf/statsmodels,astocko/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,kiyoto/stat
smodels,josef-pkt/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,saketkc/statsmodels,musically-ut/statsmodels,jseabold/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,bzero/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,wwf5067/statsmodels,wdurhamh/statsmodels,nvoron23/statsmodels,detrout/debian-statsmodels,edhuckle/statsmodels,jseabold/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,hlin117/statsmodels,YihaoLu/statsmodels,bzero/statsmodels,phobson/statsmodels,nvoron23/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,bzero/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,detrout/debian-statsmodels,wzbozon/statsmodels,phobson/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,gef756/statsmodels,wwf5067/statsmodels,wkfwkf/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wzbozon/statsmodels,bavardage/statsmodels,waynenilsen/statsmodels,cbmoore/statsmodels,cbmoore/statsmodels,jstoxrocky/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,saketkc/statsmodels,yarikoptic/pystatsmodels,wdurhamh/statsmodels,gef756/statsmodels,rgommers/statsmodels,yarikoptic/pystatsmodels
|
#__all__ = filter(lambda s:not s.startswith('_'),dir())
- import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \
+ from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino,
- macrodata, nile, randhie, scotland, spector, stackloss, star98, \
+ grunfeld, longley, macrodata, nile, randhie, scotland, spector,
- strikes, sunspots, fair, heart, statecrime
+ stackloss, star98, strikes, sunspots, fair, heart, statecrime)
-
|
Switch to relative imports and fix pep-8
|
## Code Before:
#__all__ = filter(lambda s:not s.startswith('_'),dir())
import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \
macrodata, nile, randhie, scotland, spector, stackloss, star98, \
strikes, sunspots, fair, heart, statecrime
## Instruction:
Switch to relative imports and fix pep-8
## Code After:
#__all__ = filter(lambda s:not s.startswith('_'),dir())
from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino,
grunfeld, longley, macrodata, nile, randhie, scotland, spector,
stackloss, star98, strikes, sunspots, fair, heart, statecrime)
|
#__all__ = filter(lambda s:not s.startswith('_'),dir())
- import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \
? ---------------------
+ from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino,
? +++++++ +
- macrodata, nile, randhie, scotland, spector, stackloss, star98, \
+ grunfeld, longley, macrodata, nile, randhie, scotland, spector,
- strikes, sunspots, fair, heart, statecrime
+ stackloss, star98, strikes, sunspots, fair, heart, statecrime)
? ++++++++++++++++++++++++++ +
-
|
c32e87894d4baf404d5b300459fc68a6d9d973c8
|
zun/db/__init__.py
|
zun/db/__init__.py
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
Remove the duplicated config sqlite_db
|
Remove the duplicated config sqlite_db
The config sqlite_db has been removed from oslo.db. See here:
https://review.openstack.org/#/c/449437/
Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
- options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
+ options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
Remove the duplicated config sqlite_db
|
## Code Before:
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
## Instruction:
Remove the duplicated config sqlite_db
## Code After:
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
- options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
? --------------
+ options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
662287761b8549a86d3fb8c05ec37d47491da120
|
flatblocks/urls.py
|
flatblocks/urls.py
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
]
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
|
Use raw string notation for regular expression.
|
Use raw string notation for regular expression.
|
Python
|
bsd-3-clause
|
funkybob/django-flatblocks,funkybob/django-flatblocks
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
- re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
+ re_path(
+ r"^edit/(?P<pk>\d+)/$",
+ staff_member_required(edit),
+ name="flatblocks-edit",
+ ),
]
|
Use raw string notation for regular expression.
|
## Code Before:
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
]
## Instruction:
Use raw string notation for regular expression.
## Code After:
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
re_path(
r"^edit/(?P<pk>\d+)/$",
staff_member_required(edit),
name="flatblocks-edit",
),
]
|
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import re_path
from flatblocks.views import edit
urlpatterns = [
- re_path("^edit/(?P<pk>\d+)/$", staff_member_required(edit), name="flatblocks-edit"),
+ re_path(
+ r"^edit/(?P<pk>\d+)/$",
+ staff_member_required(edit),
+ name="flatblocks-edit",
+ ),
]
|
52b6dac7528232dfd41841f4697c7a78e2a2e675
|
www/src/Lib/_weakref.py
|
www/src/Lib/_weakref.py
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
Add method __call__ to _weaksetref.WeakSet
|
Add method __call__ to _weaksetref.WeakSet
|
Python
|
bsd-3-clause
|
olemis/brython,Lh4cKg/brython,molebot/brython,kikocorreoso/brython,Isendir/brython,Mozhuowen/brython,Isendir/brython,amrdraz/brython,Hasimir/brython,olemis/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,firmlyjin/brython,Mozhuowen/brython,jonathanverner/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Hasimir/brython,Lh4cKg/brython,JohnDenker/brython,firmlyjin/brython,firmlyjin/brython,Hasimir/brython,amrdraz/brython,Hasimir/brython,molebot/brython,kevinmel2000/brython,rubyinhell/brython,Mozhuowen/brython,Isendir/brython,kikocorreoso/brython,brython-dev/brython,rubyinhell/brython,Lh4cKg/brython,JohnDenker/brython,JohnDenker/brython,rubyinhell/brython,rubyinhell/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Lh4cKg/brython,kevinmel2000/brython,brython-dev/brython,kikocorreoso/brython,jonathanverner/brython,Mozhuowen/brython,firmlyjin/brython,amrdraz/brython,Isendir/brython,amrdraz/brython,olemis/brython,brython-dev/brython
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
+ def __call__(self):
+ return self.obj.obj
+
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
Add method __call__ to _weaksetref.WeakSet
|
## Code Before:
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
## Instruction:
Add method __call__ to _weaksetref.WeakSet
## Code After:
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
+ def __call__(self):
+ return self.obj.obj
+
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
98de0f94332cd2a0faedd1c72d2ee4092552fdb0
|
tests/unit/helper.py
|
tests/unit/helper.py
|
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
Fix the issue where the mock is persisting calls
|
Fix the issue where the mock is persisting calls
|
Python
|
bsd-3-clause
|
jim-minter/github3.py,wbrefvem/github3.py,agamdua/github3.py,h4ck3rm1k3/github3.py,krxsky/github3.py,balloob/github3.py,ueg1990/github3.py,sigmavirus24/github3.py,icio/github3.py,christophelec/github3.py,itsmemattchung/github3.py,degustaf/github3.py
|
import mock
import github3
import unittest
-
- MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
+ def create_mocked_session(self):
+ MockedSession = mock.create_autospec(github3.session.GitHubSession)
+ return MockedSession()
+
def create_session_mock(self, *args):
- session = MockedSession()
+ session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
Fix the issue where the mock is persisting calls
|
## Code Before:
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
## Instruction:
Fix the issue where the mock is persisting calls
## Code After:
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
import mock
import github3
import unittest
-
- MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
+ def create_mocked_session(self):
+ MockedSession = mock.create_autospec(github3.session.GitHubSession)
+ return MockedSession()
+
def create_session_mock(self, *args):
- session = MockedSession()
? ^ ^
+ session = self.create_mocked_session()
? ^^^^^^^^^^^^^ ^^
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
28d4538e02d66d06fcba1d386b506502c7bad4a0
|
bakery/views.py
|
bakery/views.py
|
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
|
from django.db.models import Q
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
def get_queryset(self):
queryset = super(HomeView, self).get_queryset()
search_query = self.request.GET.get('q', None)
if search_query:
q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
queryset = queryset.filter(q)
return queryset
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
|
Integrate search in home view
|
Integrate search in home view
|
Python
|
bsd-3-clause
|
muffins-on-dope/bakery,muffins-on-dope/bakery,muffins-on-dope/bakery
|
+
+ from django.db.models import Q
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
+
+ def get_queryset(self):
+ queryset = super(HomeView, self).get_queryset()
+ search_query = self.request.GET.get('q', None)
+ if search_query:
+ q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
+ queryset = queryset.filter(q)
+ return queryset
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
|
Integrate search in home view
|
## Code Before:
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
## Instruction:
Integrate search in home view
## Code After:
from django.db.models import Q
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
def get_queryset(self):
queryset = super(HomeView, self).get_queryset()
search_query = self.request.GET.get('q', None)
if search_query:
q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
queryset = queryset.filter(q)
return queryset
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
|
+
+ from django.db.models import Q
from django.views.generic import ListView, TemplateView
from bakery.cookies.models import Cookie
class HomeView(ListView):
model = Cookie
template_name = 'home.html'
+ def get_queryset(self):
+ queryset = super(HomeView, self).get_queryset()
+ search_query = self.request.GET.get('q', None)
+ if search_query:
+ q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
+ queryset = queryset.filter(q)
+ return queryset
+
home = HomeView.as_view()
class StylesView(TemplateView):
template_name = 'styles.html'
styles = StylesView.as_view()
|
d6f2b132844d1923932447c0ce67c581f723f433
|
wagtail/wagtailadmin/menu.py
|
wagtail/wagtailadmin/menu.py
|
from __future__ import unicode_literals
from six import text_type
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
self.name, self.url, self.classnames, self.label)
|
from __future__ import unicode_literals
from six import text_type
try:
# renamed util -> utils in Django 1.7; try the new name first
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
if attrs:
self.attr_string = flatatt(attrs)
else:
self.attr_string = ""
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
self.name, self.url, self.classnames, self.attr_string, self.label)
|
Support passing html attributes into MenuItem
|
Support passing html attributes into MenuItem
|
Python
|
bsd-3-clause
|
JoshBarr/wagtail,m-sanders/wagtail,hamsterbacke23/wagtail,benemery/wagtail,jordij/wagtail,nutztherookie/wagtail,mixxorz/wagtail,nutztherookie/wagtail,dresiu/wagtail,serzans/wagtail,mixxorz/wagtail,bjesus/wagtail,nrsimha/wagtail,nilnvoid/wagtail,inonit/wagtail,torchbox/wagtail,wagtail/wagtail,dresiu/wagtail,davecranwell/wagtail,timorieber/wagtail,kurtrwall/wagtail,Pennebaker/wagtail,kaedroho/wagtail,kurtrwall/wagtail,thenewguy/wagtail,jnns/wagtail,nealtodd/wagtail,rsalmaso/wagtail,taedori81/wagtail,mephizzle/wagtail,stevenewey/wagtail,quru/wagtail,marctc/wagtail,tangentlabs/wagtail,Klaudit/wagtail,quru/wagtail,gogobook/wagtail,kurtrwall/wagtail,takeshineshiro/wagtail,rsalmaso/wagtail,quru/wagtail,wagtail/wagtail,thenewguy/wagtail,benjaoming/wagtail,mixxorz/wagtail,taedori81/wagtail,nrsimha/wagtail,taedori81/wagtail,nilnvoid/wagtail,zerolab/wagtail,mephizzle/wagtail,rjsproxy/wagtail,darith27/wagtail,benjaoming/wagtail,iho/wagtail,jnns/wagtail,rv816/wagtail,nealtodd/wagtail,torchbox/wagtail,serzans/wagtail,mephizzle/wagtail,WQuanfeng/wagtail,takeflight/wagtail,mjec/wagtail,thenewguy/wagtail,torchbox/wagtail,rjsproxy/wagtail,jorge-marques/wagtail,m-sanders/wagtail,iho/wagtail,benemery/wagtail,serzans/wagtail,stevenewey/wagtail,janusnic/wagtail,JoshBarr/wagtail,chimeno/wagtail,Tivix/wagtail,chimeno/wagtail,nilnvoid/wagtail,Klaudit/wagtail,chrxr/wagtail,marctc/wagtail,KimGlazebrook/wagtail-experiment,gogobook/wagtail,zerolab/wagtail,dresiu/wagtail,takeflight/wagtail,nimasmi/wagtail,nimasmi/wagtail,JoshBarr/wagtail,Pennebaker/wagtail,hanpama/wagtail,davecranwell/wagtail,iansprice/wagtail,kaedroho/wagtail,inonit/wagtail,mixxorz/wagtail,rv816/wagtail,KimGlazebrook/wagtail-experiment,stevenewey/wagtail,inonit/wagtail,jordij/wagtail,kurtw/wagtail,bjesus/wagtail,mephizzle/wagtail,jorge-marques/wagtail,torchbox/wagtail,nilnvoid/wagtail,chimeno/wagtail,gasman/wagtail,mjec/wagtail,dresiu/wagtail,hanpama/wagtail,hamsterbacke23/wagtail,rv816/wagtail,KimGlazebrook/wagtail-experiment,
tangentlabs/wagtail,mayapurmedia/wagtail,willcodefortea/wagtail,FlipperPA/wagtail,FlipperPA/wagtail,gogobook/wagtail,timorieber/wagtail,jnns/wagtail,m-sanders/wagtail,nutztherookie/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,mjec/wagtail,thenewguy/wagtail,wagtail/wagtail,kaedroho/wagtail,willcodefortea/wagtail,willcodefortea/wagtail,bjesus/wagtail,gasman/wagtail,chrxr/wagtail,gogobook/wagtail,zerolab/wagtail,rjsproxy/wagtail,wagtail/wagtail,nrsimha/wagtail,Klaudit/wagtail,iho/wagtail,mjec/wagtail,chrxr/wagtail,timorieber/wagtail,FlipperPA/wagtail,benemery/wagtail,mikedingjan/wagtail,mikedingjan/wagtail,gasman/wagtail,janusnic/wagtail,Toshakins/wagtail,WQuanfeng/wagtail,rv816/wagtail,takeflight/wagtail,WQuanfeng/wagtail,janusnic/wagtail,rjsproxy/wagtail,nutztherookie/wagtail,janusnic/wagtail,iansprice/wagtail,JoshBarr/wagtail,jnns/wagtail,takeshineshiro/wagtail,kaedroho/wagtail,willcodefortea/wagtail,taedori81/wagtail,bjesus/wagtail,jorge-marques/wagtail,Tivix/wagtail,darith27/wagtail,marctc/wagtail,mayapurmedia/wagtail,gasman/wagtail,mayapurmedia/wagtail,tangentlabs/wagtail,dresiu/wagtail,iansprice/wagtail,kaedroho/wagtail,kurtw/wagtail,inonit/wagtail,benjaoming/wagtail,Tivix/wagtail,zerolab/wagtail,stevenewey/wagtail,mayapurmedia/wagtail,davecranwell/wagtail,jorge-marques/wagtail,darith27/wagtail,chimeno/wagtail,Toshakins/wagtail,rsalmaso/wagtail,nrsimha/wagtail,gasman/wagtail,Tivix/wagtail,nealtodd/wagtail,Pennebaker/wagtail,rsalmaso/wagtail,jordij/wagtail,jorge-marques/wagtail,benemery/wagtail,iho/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,hanpama/wagtail,takeshineshiro/wagtail,kurtw/wagtail,nimasmi/wagtail,iansprice/wagtail,kurtrwall/wagtail,nealtodd/wagtail,davecranwell/wagtail,rsalmaso/wagtail,timorieber/wagtail,Pennebaker/wagtail,Klaudit/wagtail,serzans/wagtail,m-sanders/wagtail,marctc/wagtail,taedori81/wagtail,darith27/wagtail,thenewguy/wagtail,chimeno/wagtail,zerolab/wagtail,takeflight/wagtail,chrxr/wagtail,mikedingjan/wagtail,Toshakins/wagtail,mik
edingjan/wagtail,KimGlazebrook/wagtail-experiment,quru/wagtail,Toshakins/wagtail,tangentlabs/wagtail,WQuanfeng/wagtail,kurtw/wagtail,mixxorz/wagtail,hanpama/wagtail,takeshineshiro/wagtail,benjaoming/wagtail,jordij/wagtail,wagtail/wagtail
|
from __future__ import unicode_literals
from six import text_type
+
+ try:
+ # renamed util -> utils in Django 1.7; try the new name first
+ from django.forms.utils import flatatt
+ except ImportError:
+ from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
- def __init__(self, label, url, name=None, classnames='', order=1000):
+ def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
+ if attrs:
+ self.attr_string = flatatt(attrs)
+ else:
+ self.attr_string = ""
+
def render_html(self):
return format_html(
- """<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
+ """<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
- self.name, self.url, self.classnames, self.label)
+ self.name, self.url, self.classnames, self.attr_string, self.label)
|
Support passing html attributes into MenuItem
|
## Code Before:
from __future__ import unicode_literals
from six import text_type
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
self.name, self.url, self.classnames, self.label)
## Instruction:
Support passing html attributes into MenuItem
## Code After:
from __future__ import unicode_literals
from six import text_type
try:
# renamed util -> utils in Django 1.7; try the new name first
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
if attrs:
self.attr_string = flatatt(attrs)
else:
self.attr_string = ""
def render_html(self):
return format_html(
"""<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
self.name, self.url, self.classnames, self.attr_string, self.label)
|
from __future__ import unicode_literals
from six import text_type
+
+ try:
+ # renamed util -> utils in Django 1.7; try the new name first
+ from django.forms.utils import flatatt
+ except ImportError:
+ from django.forms.util import flatatt
from django.utils.text import slugify
from django.utils.html import format_html
class MenuItem(object):
- def __init__(self, label, url, name=None, classnames='', order=1000):
+ def __init__(self, label, url, name=None, classnames='', attrs=None, order=1000):
? ++++++++++++
self.label = label
self.url = url
self.classnames = classnames
self.name = (name or slugify(text_type(label)))
self.order = order
+ if attrs:
+ self.attr_string = flatatt(attrs)
+ else:
+ self.attr_string = ""
+
def render_html(self):
return format_html(
- """<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>""",
? ^
+ """<li class="menu-{0}"><a href="{1}" class="{2}"{3}>{4}</a></li>""",
? +++ ^
- self.name, self.url, self.classnames, self.label)
+ self.name, self.url, self.classnames, self.attr_string, self.label)
? ++++++++++++++++++
|
cb6fa6b54ca3e1908037a1b1a3399d8bd4b1be58
|
djoser/compat.py
|
djoser/compat.py
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError:
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError: # pragma: no cover
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
Fix invalid fallback leading to circular calls
|
Fix invalid fallback leading to circular calls
Remove redundant finally
|
Python
|
mit
|
sunscrapers/djoser,akalipetis/djoser,sunscrapers/djoser,sunscrapers/djoser,akalipetis/djoser
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
- except ImportError:
+ except ImportError: # pragma: no cover
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
Fix invalid fallback leading to circular calls
|
## Code Before:
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError:
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
## Instruction:
Fix invalid fallback leading to circular calls
## Code After:
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError: # pragma: no cover
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
- except ImportError:
+ except ImportError: # pragma: no cover
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
507a4f7f931c12c9883ff1644f5d0cc44270d5c2
|
salt/thorium/status.py
|
salt/thorium/status.py
|
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
idata[key] = event['data'][key]
__reg__['status']['val'][event['data']['data']['id']] = idata
ret['changes'][event['data']['data']['id']] = True
return ret
|
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
idata[key] = event['data']['data'][key]
__reg__['status']['val'][event['data']['id']] = idata
ret['changes'][event['data']['id']] = True
return ret
|
Reorder keys that were being declared in the wrong place
|
Reorder keys that were being declared in the wrong place
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
- idata[key] = event['data'][key]
+ idata[key] = event['data']['data'][key]
- __reg__['status']['val'][event['data']['data']['id']] = idata
+ __reg__['status']['val'][event['data']['id']] = idata
- ret['changes'][event['data']['data']['id']] = True
+ ret['changes'][event['data']['id']] = True
return ret
|
Reorder keys that were being declared in the wrong place
|
## Code Before:
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
idata[key] = event['data'][key]
__reg__['status']['val'][event['data']['data']['id']] = idata
ret['changes'][event['data']['data']['id']] = True
return ret
## Instruction:
Reorder keys that were being declared in the wrong place
## Code After:
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
idata[key] = event['data']['data'][key]
__reg__['status']['val'][event['data']['id']] = idata
ret['changes'][event['data']['id']] = True
return ret
|
'''
This thorium state is used to track the status beacon events and keep track of
the active status of minions
.. versionadded:: 2016.11.0
'''
# Import python libs
from __future__ import absolute_import
import time
import fnmatch
def reg(name):
'''
Activate this register to turn on a minion status tracking register, this
register keeps the current status beacon data and the time that each beacon
was last checked in.
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
now = time.time()
if 'status' not in __reg__:
__reg__['status'] = {}
__reg__['status']['val'] = {}
for event in __events__:
if fnmatch.fnmatch(event['tag'], 'salt/beacon/*/status/*'):
# Got one!
idata = {'recv_time': now}
for key in event['data']['data']:
if key in ('id', 'recv_time'):
continue
- idata[key] = event['data'][key]
+ idata[key] = event['data']['data'][key]
? ++++++++
- __reg__['status']['val'][event['data']['data']['id']] = idata
? --------
+ __reg__['status']['val'][event['data']['id']] = idata
- ret['changes'][event['data']['data']['id']] = True
? --------
+ ret['changes'][event['data']['id']] = True
return ret
|
94c98ad923f1a136bcf14b81d559f634c1bc262e
|
populous/generators/select.py
|
populous/generators/select.py
|
from .base import Generator
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
self.where = where
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
values = backend.select_random(self.table, fields=(self.pk,),
where=self.where, max_rows=10000)
for value in values:
yield value
|
from .base import Generator
from .vars import parse_vars
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
self.where = parse_vars(where)
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
where = self.evaluate(self.where)
values = backend.select_random(self.table, fields=(self.pk,),
where=where, max_rows=10000)
for value in values:
if self.evaluate(self.where) != where:
break
yield value
|
Handle where with variables in Select generator
|
Handle where with variables in Select generator
|
Python
|
mit
|
novafloss/populous
|
from .base import Generator
+ from .vars import parse_vars
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
- self.where = where
+ self.where = parse_vars(where)
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
+ where = self.evaluate(self.where)
values = backend.select_random(self.table, fields=(self.pk,),
- where=self.where, max_rows=10000)
+ where=where, max_rows=10000)
for value in values:
+ if self.evaluate(self.where) != where:
+ break
yield value
|
Handle where with variables in Select generator
|
## Code Before:
from .base import Generator
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
self.where = where
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
values = backend.select_random(self.table, fields=(self.pk,),
where=self.where, max_rows=10000)
for value in values:
yield value
## Instruction:
Handle where with variables in Select generator
## Code After:
from .base import Generator
from .vars import parse_vars
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
self.where = parse_vars(where)
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
where = self.evaluate(self.where)
values = backend.select_random(self.table, fields=(self.pk,),
where=where, max_rows=10000)
for value in values:
if self.evaluate(self.where) != where:
break
yield value
|
from .base import Generator
+ from .vars import parse_vars
class Select(Generator):
def get_arguments(self, table=None, where=None, pk='id', **kwargs):
super(Select, self).get_arguments(**kwargs)
self.table = table
- self.where = where
+ self.where = parse_vars(where)
? +++++++++++ +
self.pk = pk
def generate(self):
backend = self.blueprint.backend
while True:
+ where = self.evaluate(self.where)
values = backend.select_random(self.table, fields=(self.pk,),
- where=self.where, max_rows=10000)
? -----
+ where=where, max_rows=10000)
for value in values:
+ if self.evaluate(self.where) != where:
+ break
yield value
|
16d99a20088e81045e34999b6045e9222d510cd5
|
app.py
|
app.py
|
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
|
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
try:
from raven import Client
from raven.contrib.celery import register_signal
except ImportError:
pass
else:
if os.environ.get('SENTRY_DSN'):
register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
|
Use raven for logging if available
|
Use raven for logging if available
|
Python
|
bsd-3-clause
|
fulfilio/trytond-async,tarunbhardwaj/trytond-async
|
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
+
+ try:
+ from raven import Client
+ from raven.contrib.celery import register_signal
+ except ImportError:
+ pass
+ else:
+ if os.environ.get('SENTRY_DSN'):
+ register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
|
Use raven for logging if available
|
## Code Before:
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
## Instruction:
Use raven for logging if available
## Code After:
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
try:
from raven import Client
from raven.contrib.celery import register_signal
except ImportError:
pass
else:
if os.environ.get('SENTRY_DSN'):
register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
|
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
+
+ try:
+ from raven import Client
+ from raven.contrib.celery import register_signal
+ except ImportError:
+ pass
+ else:
+ if os.environ.get('SENTRY_DSN'):
+ register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
|
1a35294baac296bbd665dc55e35cd2b383ae7116
|
alembic/versions/87acbbe5887b_add_freeform_profile_for_user.py
|
alembic/versions/87acbbe5887b_add_freeform_profile_for_user.py
|
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False))
def downgrade():
op.drop_column('user', 'profile_data')
|
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False, server_default=u'{}'))
def downgrade():
op.drop_column('user', 'profile_data')
|
Set default value for user profile data
|
Set default value for user profile data
|
Python
|
mit
|
katajakasa/aetherguild2,katajakasa/aetherguild2
|
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
- op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False))
+ op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False, server_default=u'{}'))
def downgrade():
op.drop_column('user', 'profile_data')
|
Set default value for user profile data
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False))
def downgrade():
op.drop_column('user', 'profile_data')
## Instruction:
Set default value for user profile data
## Code After:
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False, server_default=u'{}'))
def downgrade():
op.drop_column('user', 'profile_data')
|
# revision identifiers, used by Alembic.
revision = '87acbbe5887b'
down_revision = 'f2a71c7b93b6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
- op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False))
+ op.add_column('user', sa.Column('profile_data', sa.Text(), nullable=False, server_default=u'{}'))
? ++++++++++++++++++++++
def downgrade():
op.drop_column('user', 'profile_data')
|
d3a24fae87005b7f5c47657851b4341726494383
|
atest/resources/atest_variables.py
|
atest/resources/atest_variables.py
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
Fix getting Windows system encoding on non-ASCII envs
|
atests: Fix getting Windows system encoding on non-ASCII envs
|
Python
|
apache-2.0
|
HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
- output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
+ output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
+ errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
Fix getting Windows system encoding on non-ASCII envs
|
## Code Before:
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
## Instruction:
Fix getting Windows system encoding on non-ASCII envs
## Code After:
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
from os.path import abspath, dirname, join, normpath
import locale
import os
import subprocess
import robot
__all__ = ['ROBOTPATH', 'ROBOT_VERSION', 'DATADIR', 'SYSTEM_ENCODING',
'CONSOLE_ENCODING']
ROBOTPATH = dirname(abspath(robot.__file__))
ROBOT_VERSION = robot.version.get_version()
DATADIR = normpath(join(dirname(abspath(__file__)), '..', 'testdata'))
SYSTEM_ENCODING = locale.getpreferredencoding(False)
# Python 3.6+ uses UTF-8 internally on Windows. We want real console encoding.
if os.name == 'nt':
- output = subprocess.check_output('chcp', shell=True, encoding='ASCII')
? ^
+ output = subprocess.check_output('chcp', shell=True, encoding='ASCII',
? ^
+ errors='ignore')
CONSOLE_ENCODING = 'cp' + output.split()[-1]
else:
CONSOLE_ENCODING = locale.getdefaultlocale()[-1]
|
30bfe04e0fa1386e263cbd0e8dbc6f3689f9cb21
|
connector_carepoint/migrations/9.0.1.3.0/pre-migrate.py
|
connector_carepoint/migrations/9.0.1.3.0/pre-migrate.py
|
def migrate(cr, version):
cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
'RENAME TO carepoint_rx_ord_ln')
cr.execute('ALTER TABLE carepoint_carepoint_organization '
'RENAME TO carepoint_org_bind')
|
import logging
_logger = logging.getLogger(__name__)
def migrate(cr, version):
try:
cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
'RENAME TO carepoint_rx_ord_ln')
except Exception:
cr.rollback()
_logger.exception('Cannot perform migration')
try:
cr.execute('ALTER TABLE carepoint_carepoint_organization '
'RENAME TO carepoint_org_bind')
except Exception:
cr.rollback()
_logger.exception('Cannot perform migration')
|
Fix prescription migration * Add try/catch & rollback to db alterations in case server re-upgrades
|
[FIX] connector_carepoint: Fix prescription migration
* Add try/catch & rollback to db alterations in case server re-upgrades
|
Python
|
agpl-3.0
|
laslabs/odoo-connector-carepoint
|
+
+ import logging
+
+ _logger = logging.getLogger(__name__)
def migrate(cr, version):
+ try:
- cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
+ cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
- 'RENAME TO carepoint_rx_ord_ln')
+ 'RENAME TO carepoint_rx_ord_ln')
+ except Exception:
+ cr.rollback()
+ _logger.exception('Cannot perform migration')
+ try:
- cr.execute('ALTER TABLE carepoint_carepoint_organization '
+ cr.execute('ALTER TABLE carepoint_carepoint_organization '
- 'RENAME TO carepoint_org_bind')
+ 'RENAME TO carepoint_org_bind')
+ except Exception:
+ cr.rollback()
+ _logger.exception('Cannot perform migration')
|
Fix prescription migration * Add try/catch & rollback to db alterations in case server re-upgrades
|
## Code Before:
def migrate(cr, version):
cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
'RENAME TO carepoint_rx_ord_ln')
cr.execute('ALTER TABLE carepoint_carepoint_organization '
'RENAME TO carepoint_org_bind')
## Instruction:
Fix prescription migration * Add try/catch & rollback to db alterations in case server re-upgrades
## Code After:
import logging
_logger = logging.getLogger(__name__)
def migrate(cr, version):
try:
cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
'RENAME TO carepoint_rx_ord_ln')
except Exception:
cr.rollback()
_logger.exception('Cannot perform migration')
try:
cr.execute('ALTER TABLE carepoint_carepoint_organization '
'RENAME TO carepoint_org_bind')
except Exception:
cr.rollback()
_logger.exception('Cannot perform migration')
|
+
+ import logging
+
+ _logger = logging.getLogger(__name__)
def migrate(cr, version):
+ try:
- cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
+ cr.execute('ALTER TABLE carepoint_medical_prescription_order_line '
? ++++
- 'RENAME TO carepoint_rx_ord_ln')
+ 'RENAME TO carepoint_rx_ord_ln')
? ++++
+ except Exception:
+ cr.rollback()
+ _logger.exception('Cannot perform migration')
+ try:
- cr.execute('ALTER TABLE carepoint_carepoint_organization '
+ cr.execute('ALTER TABLE carepoint_carepoint_organization '
? ++++
- 'RENAME TO carepoint_org_bind')
+ 'RENAME TO carepoint_org_bind')
? ++++
+ except Exception:
+ cr.rollback()
+ _logger.exception('Cannot perform migration')
|
0b6db0b19e9389b1c5e62ddab5cdab4886364252
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
"Pillow==4.1.1",
"PySDL2==0.9.5",
])
|
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
"PySDL2",
],
extras_require={
"dev": [
"Pillow",
"pytest",
],
}
)
|
Move Pillow to development deps; add pytest to development deps
|
Move Pillow to development deps; add pytest to development deps
|
Python
|
mit
|
zmarvel/slowboy
|
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
- "Pillow==4.1.1",
- "PySDL2==0.9.5",
+ "PySDL2",
- ])
+ ],
+ extras_require={
+ "dev": [
+ "Pillow",
+ "pytest",
+ ],
+ }
+ )
|
Move Pillow to development deps; add pytest to development deps
|
## Code Before:
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
"Pillow==4.1.1",
"PySDL2==0.9.5",
])
## Instruction:
Move Pillow to development deps; add pytest to development deps
## Code After:
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
"PySDL2",
],
extras_require={
"dev": [
"Pillow",
"pytest",
],
}
)
|
from distutils.core import setup
setup(name='slowboy',
version='0.0.1',
packages=['slowboy'],
url='https://github.com/zmarvel/slowboy/',
author='Zack Marvel',
author_email='zpmarvel at gmail dot com',
install_requires=[
- "Pillow==4.1.1",
- "PySDL2==0.9.5",
? -------
+ "PySDL2",
- ])
? ^
+ ],
? ^
+ extras_require={
+ "dev": [
+ "Pillow",
+ "pytest",
+ ],
+ }
+ )
|
5085e2f8c97ecab6617b4f7b0c8250095d47b22d
|
boardinghouse/templatetags/boardinghouse.py
|
boardinghouse/templatetags/boardinghouse.py
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
Remove a database access from the template tag.
|
Remove a database access from the template tag.
--HG--
branch : schema-invitations
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
from django import template
from ..schema import is_shared_model as _is_shared_model
- from ..schema import get_schema_model
+ from ..schema import _get_schema
-
- Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
- def schema_name(pk):
+ def schema_name(schema):
try:
- return Schema.objects.get(pk=pk).name
- except Schema.DoesNotExist:
+ return _get_schema(schema).name
+ except AttributeError:
return "no schema"
|
Remove a database access from the template tag.
|
## Code Before:
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
## Instruction:
Remove a database access from the template tag.
## Code After:
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
from django import template
from ..schema import is_shared_model as _is_shared_model
- from ..schema import get_schema_model
? ------
+ from ..schema import _get_schema
? +
-
- Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
- def schema_name(pk):
? ^^
+ def schema_name(schema):
? ^^^^^^
try:
- return Schema.objects.get(pk=pk).name
- except Schema.DoesNotExist:
+ return _get_schema(schema).name
+ except AttributeError:
return "no schema"
|
399430076227f42f5d168c5b2264933c32f4b52a
|
lib/ansible/release.py
|
lib/ansible/release.py
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
Update ansible version number to 2.8.0.dev0
|
Update ansible version number to 2.8.0.dev0
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
- __version__ = '2.7.0.a1.post0'
+ __version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
- __codename__ = 'In the Light'
+ __codename__ = 'TBD'
|
Update ansible version number to 2.8.0.dev0
|
## Code Before:
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
## Instruction:
Update ansible version number to 2.8.0.dev0
## Code After:
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
- __version__ = '2.7.0.a1.post0'
? ^ ^^^^^^^
+ __version__ = '2.8.0.dev0'
? ^ ^^^
__author__ = 'Ansible, Inc.'
- __codename__ = 'In the Light'
+ __codename__ = 'TBD'
|
cc62a1eea746a7191b4a07a48dcf55f4c76787ee
|
asyncpg/__init__.py
|
asyncpg/__init__.py
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
Use loop.create_future if it exists
|
Use loop.create_future if it exists
|
Python
|
apache-2.0
|
MagicStack/asyncpg,MagicStack/asyncpg
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
- waiter = asyncio.Future(loop=self._loop)
+ waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
- waiter = asyncio.Future(loop=self._loop)
+ waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
- waiter = asyncio.Future(loop=self._connection._loop)
+ waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
+
+ def _create_future(loop):
+ try:
+ create_future = loop.create_future
+ except AttributeError:
+ return asyncio.Future(loop=loop)
+ else:
+ return create_future()
+
|
Use loop.create_future if it exists
|
## Code Before:
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
## Instruction:
Use loop.create_future if it exists
## Code After:
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
- waiter = asyncio.Future(loop=self._loop)
? ^^^^^^^^ -----
+ waiter = _create_future(self._loop)
? ++++ ^^^^
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
- waiter = asyncio.Future(loop=self._loop)
? ^^^^^^^^ -----
+ waiter = _create_future(self._loop)
? ++++ ^^^^
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
- waiter = asyncio.Future(loop=self._connection._loop)
? ^^^^^^^^ -----
+ waiter = _create_future(self._connection._loop)
? ++++ ^^^^
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
+
+
+ def _create_future(loop):
+ try:
+ create_future = loop.create_future
+ except AttributeError:
+ return asyncio.Future(loop=loop)
+ else:
+ return create_future()
|
ee425b43502054895986c447e4cdae2c7e6c9278
|
Lib/fontTools/misc/timeTools.py
|
Lib/fontTools/misc/timeTools.py
|
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except ValueError:
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
|
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
# https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except (OverflowError, ValueError):
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
|
Adjust for Python 3.3 change in gmtime() exception type
|
Adjust for Python 3.3 change in gmtime() exception type
https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
Fixes https://github.com/behdad/fonttools/issues/99
|
Python
|
mit
|
googlefonts/fonttools,fonttools/fonttools
|
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
+ # https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
- except ValueError:
+ except (OverflowError, ValueError):
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
|
Adjust for Python 3.3 change in gmtime() exception type
|
## Code Before:
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except ValueError:
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
## Instruction:
Adjust for Python 3.3 change in gmtime() exception type
## Code After:
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
# https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except (OverflowError, ValueError):
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
|
"""fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
+ # https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
- except ValueError:
+ except (OverflowError, ValueError):
value = time.asctime(time.gmtime(0))
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
|
f0c7e1b8a2de6f7e9445e2158cf679f399df6545
|
jupyternotify/jupyternotify.py
|
jupyternotify/jupyternotify.py
|
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
super().__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
|
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
super(JupyterNotifyMagics, self).__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
|
Make this work with python2 too.
|
Make this work with python2 too.
|
Python
|
bsd-3-clause
|
ShopRunner/jupyter-notify,ShopRunner/jupyter-notify
|
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
- super().__init__(shell)
+ super(JupyterNotifyMagics, self).__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
|
Make this work with python2 too.
|
## Code Before:
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
super().__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
## Instruction:
Make this work with python2 too.
## Code After:
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
super(JupyterNotifyMagics, self).__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
|
import uuid
from IPython.core.getipython import get_ipython
from IPython.core.magic import Magics, magics_class, cell_magic
from IPython.display import display, Javascript
from pkg_resources import resource_filename
@magics_class
class JupyterNotifyMagics(Magics):
def __init__(self, shell):
- super().__init__(shell)
+ super(JupyterNotifyMagics, self).__init__(shell)
with open(resource_filename("jupyternotify", "js/init.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString))
@cell_magic
def notify(self, line, cell):
# generate a uuid so that we only deliver this notification once, not again
# when the browser reloads (we append a div to check that)
notification_uuid = uuid.uuid4()
output = get_ipython().run_cell(cell)
# display our browser notification using javascript
with open(resource_filename("jupyternotify", "js/notify.js")) as jsFile:
jsString = jsFile.read()
display(Javascript(jsString % {"notification_uuid": notification_uuid}))
# finally, if we generated an exception, print the traceback
if output.error_in_exec is not None:
output.raise_error()
|
3eb37589ab7a2e58922a69f42bbc1ec443df44ed
|
addons/purchase/models/stock_config_settings.py
|
addons/purchase/models/stock_config_settings.py
|
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
po_lead = fields.Float(related='company_id.po_lead', default=lambda self: self.env.user.company_id.po_lead)
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
|
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
po_lead = fields.Float(related='company_id.po_lead')
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
|
Remove useless default value for po_lead
|
[IMP] purchase: Remove useless default value for po_lead
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
- po_lead = fields.Float(related='company_id.po_lead', default=lambda self: self.env.user.company_id.po_lead)
+ po_lead = fields.Float(related='company_id.po_lead')
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
|
Remove useless default value for po_lead
|
## Code Before:
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
po_lead = fields.Float(related='company_id.po_lead', default=lambda self: self.env.user.company_id.po_lead)
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
## Instruction:
Remove useless default value for po_lead
## Code After:
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
po_lead = fields.Float(related='company_id.po_lead')
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
|
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
- po_lead = fields.Float(related='company_id.po_lead', default=lambda self: self.env.user.company_id.po_lead)
+ po_lead = fields.Float(related='company_id.po_lead')
use_po_lead = fields.Boolean(
string="Security Lead Time for Purchase",
oldname='default_new_po_lead',
help="Margin of error for vendor lead times. When the system generates Purchase Orders for reordering products,they will be scheduled that many days earlier to cope with unexpected vendor delays.")
@api.onchange('use_po_lead')
def _onchange_use_po_lead(self):
if not self.use_po_lead:
self.po_lead = 0.0
def get_default_fields(self, fields):
return dict(
use_po_lead=self.env['ir.config_parameter'].sudo().get_param('purchase.use_po_lead')
)
def set_fields(self):
self.env['ir.config_parameter'].sudo().set_param('purchase.use_po_lead', self.use_po_lead)
|
857750c5f2fba568c9ad3320d06b4178457be612
|
uwsgi/hello.py
|
uwsgi/hello.py
|
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
('Content-type', 'text/plain'),
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
|
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
('Content-type', 'application/json'),
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
|
Fix test to use proper Content-type for json test
|
uwsgi: Fix test to use proper Content-type for json test
|
Python
|
bsd-3-clause
|
nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,testn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,dmacd/FB-try1,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,methane/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jetty-p
roject/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,doom369/FrameworkBenchmarks,doom369/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,khellang/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,denkab/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,valyala/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,methane/FrameworkBenchmarks,grob/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,donovanmu
ller/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,joshk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,yunspace/Fram
eworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,torhve/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,denkab/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,dmacd/FB-try1,mfirry/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zloster/FrameworkBenchmarks,grob/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,gre
enlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,torhve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,denkab/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,diablonhn/FrameworkB
enchmarks,Jesterovskiy/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,valyala/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,dmacd/FB-try1,kostya-sh/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,herloct/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,dmacd/FB-try1,testn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,methane/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,herloct/FrameworkBenchmarks,dmacd/FB-try1,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/Framew
orkBenchmarks,steveklabnik/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,methane/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,dmacd/FB-try1,greenlaw110/FrameworkBenchmarks,dmacd/FB-try1,F3Community/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sgml/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,joshk/FrameworkBenchmarks,methane/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,steveklabnik/Fram
eworkBenchmarks,knewmanTE/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,methane/FrameworkBenchmarks,methane/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,testn/FrameworkBenchmarks,leafo/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,actframework/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jamming/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,leafo/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,
jaguililla/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,testn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,joshk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,methane/FrameworkBenchmarks,leafo/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sgml/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,khellang/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,grob/FrameworkBen
chmarks,ratpack/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sgml/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,khellang/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,valyala/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,methane/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sxend/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,torhve/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,torhve/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Ocramius/FrameworkBen
chmarks,dmacd/FB-try1,zane-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,denkab/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,joshk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jamming/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,testn/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,doom369/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,torhve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspac
e/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jamming/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,herloct/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,denkab/FrameworkBenchmarks,khellang/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,testn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zloster/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Verber/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,stefanocasazza/FrameworkB
enchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,testn/FrameworkBenchmarks,leafo/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,denkab/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Verber/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sxend/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,joshk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,testn/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,testn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,markkolich/FrameworkB
enchmarks,Rydgel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,grob/FrameworkBenchmarks,dmacd/FB-try1,victorbriz/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,herloct/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zapov/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,dmacd/FB-try1,martin-g/FrameworkBenchmarks,hperadin/FrameworkBenchma
rks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,leafo/FrameworkBenchmarks,leafo/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,leafo/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,denkab/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zloster/FrameworkBenchmarks,torhve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchm
arks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,dmacd/FB-try1,donovanmuller/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,testn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,testn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,actframework/FrameworkBenchmarks,grob/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zapov/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,grob/FrameworkBenchmarks,circlespainter/Framewo
rkBenchmarks,kbrock/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,herloct/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,dmacd/FB-try1,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,herloct/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,actframework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks
|
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
- ('Content-type', 'text/plain'),
+ ('Content-type', 'application/json'),
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
|
Fix test to use proper Content-type for json test
|
## Code Before:
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
('Content-type', 'text/plain'),
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
## Instruction:
Fix test to use proper Content-type for json test
## Code After:
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
('Content-type', 'application/json'),
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
|
import ujson
def application(environ, start_response):
response = {
"message": "Hello, World!"
}
data = ujson.dumps(response)
response_headers = [
- ('Content-type', 'text/plain'),
? ^^^^^
+ ('Content-type', 'application/json'),
? ^^ ++ + ++++++
('Content-Length', str(len(data)))
]
start_response('200 OK', response_headers)
return [data]
|
b8e479e799539be2e413de8052bf0af084e63c8e
|
osgtest/tests/test_25_voms_admin.py
|
osgtest/tests/test_25_voms_admin.py
|
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
|
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
|
Increase the timeout value for the VOMS Admin start-up from 60s to 120s. Primarily, this is driven by occasional timeouts in the VMU tests, which can run slowly on a heavily loaded host.
|
Increase the timeout value for the VOMS Admin start-up from 60s to 120s.
Primarily, this is driven by occasional timeouts in the VMU tests, which
can run slowly on a heavily loaded host.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@18485 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
- line, gap = core.monitor_file(core.config['voms.webapp-log'],
+ line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
- core.state['voms.webapp-log-stat'],
- 'VOMS-Admin started succesfully', 60.0)
+ 'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
+ command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
- command = ('voms-admin', '--nousercert',
- '--vo', core.config['voms.vo'],
- 'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
- 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
+ '/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
|
Increase the timeout value for the VOMS Admin start-up from 60s to 120s. Primarily, this is driven by occasional timeouts in the VMU tests, which can run slowly on a heavily loaded host.
|
## Code Before:
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'],
core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 60.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert',
'--vo', core.config['voms.vo'],
'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
## Instruction:
Increase the timeout value for the VOMS Admin start-up from 60s to 120s. Primarily, this is driven by occasional timeouts in the VMU tests, which can run slowly on a heavily loaded host.
## Code After:
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
'VOMS-Admin started succesfully', 120.0)
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
'/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
core.check_system(command, 'Add VOMS Admin ACL entry')
|
import os
import unittest
import osgtest.library.core as core
import osgtest.library.osgunittest as osgunittest
class TestSetupVomsAdmin(osgunittest.OSGTestCase):
def test_01_wait_for_voms_admin(self):
core.state['voms.started-webapp'] = False
core.skip_ok_unless_installed('voms-admin-server')
- line, gap = core.monitor_file(core.config['voms.webapp-log'],
+ line, gap = core.monitor_file(core.config['voms.webapp-log'], core.state['voms.webapp-log-stat'],
? ++++++++++++++++++++++++++++++++++++
- core.state['voms.webapp-log-stat'],
- 'VOMS-Admin started succesfully', 60.0)
? ^
+ 'VOMS-Admin started succesfully', 120.0)
? ^^
self.assert_(line is not None, 'VOMS Admin webapp started')
core.state['voms.started-webapp'] = True
core.log_message('VOMS Admin started after %.1f seconds' % gap)
def test_02_open_access(self):
core.skip_ok_unless_installed('voms-admin-server', 'voms-admin-client')
self.skip_ok_unless(core.state['voms.started-webapp'], 'VOMS Admin webapp not started')
+ command = ('voms-admin', '--nousercert', '--vo', core.config['voms.vo'], 'add-ACL-entry',
- command = ('voms-admin', '--nousercert',
- '--vo', core.config['voms.vo'],
- 'add-ACL-entry', '/' + core.config['voms.vo'], 'ANYONE',
- 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
+ '/' + core.config['voms.vo'], 'ANYONE', 'VOMS_CA', 'CONTAINER_READ,MEMBERSHIP_READ', 'true')
? ++++++++++++++++++++++++++++++++++++++++
core.check_system(command, 'Add VOMS Admin ACL entry')
|
18aafd9218efe636c6efb75980b2014d43b6736e
|
tests/test_conditionals.py
|
tests/test_conditionals.py
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
|
Test for unconditional else branches
|
Test for unconditional else branches
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
+
+ def test_unconditional_else():
+ assert run("""
+ thing Program
+ does start
+ if "dog" eq "dog"
+ Output.write("dog is dog")
+ otherwise
+ Output.write("dog is not dog")
+
+ if "dog" eq "cat"
+ Output.write("dog is cat")
+ otherwise
+ Output.write("dog is not cat")
+ """).output == """dog is dog\ndog is not cat""".strip()
+
+
|
Test for unconditional else branches
|
## Code Before:
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
## Instruction:
Test for unconditional else branches
## Code After:
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
+
+
+ def test_unconditional_else():
+ assert run("""
+ thing Program
+ does start
+ if "dog" eq "dog"
+ Output.write("dog is dog")
+ otherwise
+ Output.write("dog is not dog")
+
+ if "dog" eq "cat"
+ Output.write("dog is cat")
+ otherwise
+ Output.write("dog is not cat")
+ """).output == """dog is dog\ndog is not cat""".strip()
+
|
f0270de636bb84e89cbbb54896c6ed5037a48323
|
spiralgalaxygame/precondition.py
|
spiralgalaxygame/precondition.py
|
class PreconditionError (TypeError):
def __init__(self, callee, *args):
TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
|
from types import FunctionType, MethodType
class PreconditionError (TypeError):
def __init__(self, callee, *args):
if isinstance(callee, MethodType):
name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
elif isinstance(callee, type) or isinstance(callee, FunctionType):
name = callee.__name__
TypeError.__init__(self, '{}{!r}'.format(name, args))
|
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
|
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
|
Python
|
agpl-3.0
|
nejucomo/sgg,nejucomo/sgg,nejucomo/sgg
|
+ from types import FunctionType, MethodType
+
+
class PreconditionError (TypeError):
def __init__(self, callee, *args):
- TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
+ if isinstance(callee, MethodType):
+ name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
+ elif isinstance(callee, type) or isinstance(callee, FunctionType):
+ name = callee.__name__
+ TypeError.__init__(self, '{}{!r}'.format(name, args))
+
|
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
|
## Code Before:
class PreconditionError (TypeError):
def __init__(self, callee, *args):
TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
## Instruction:
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
## Code After:
from types import FunctionType, MethodType
class PreconditionError (TypeError):
def __init__(self, callee, *args):
if isinstance(callee, MethodType):
name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
elif isinstance(callee, type) or isinstance(callee, FunctionType):
name = callee.__name__
TypeError.__init__(self, '{}{!r}'.format(name, args))
|
+ from types import FunctionType, MethodType
+
+
class PreconditionError (TypeError):
def __init__(self, callee, *args):
+ if isinstance(callee, MethodType):
+ name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
+ elif isinstance(callee, type) or isinstance(callee, FunctionType):
+ name = callee.__name__
+
- TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
? ---------- - ^ ^^^
+ TypeError.__init__(self, '{}{!r}'.format(name, args))
? ^ ^
|
86d12c7d13bd7a11a93deccf42f93df4328e70fd
|
admin_honeypot/urls.py
|
admin_honeypot/urls.py
|
from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
Update url() to path() in the urlconf.
|
Update url() to path() in the urlconf.
|
Python
|
mit
|
dmpayton/django-admin-honeypot,dmpayton/django-admin-honeypot
|
from admin_honeypot import views
- from django.conf.urls import url
+ from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
- url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
+ path('login/', views.AdminHoneypot.as_view(), name='login'),
- url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
+ re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
Update url() to path() in the urlconf.
|
## Code Before:
from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
## Instruction:
Update url() to path() in the urlconf.
## Code After:
from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
from admin_honeypot import views
- from django.conf.urls import url
+ from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
- url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
? ^^^ - - -
+ path('login/', views.AdminHoneypot.as_view(), name='login'),
? ^^^^
- url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
? - ^
+ re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
? ^^^^^^
]
|
6d32f609379febe2fdad690adc75a90e26b8d416
|
backend/backend/serializers.py
|
backend/backend/serializers.py
|
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
|
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
fields = ('id', 'name', 'dob', 'gender',
'active', 'own', 'father', 'mother')
def validate_father(self, father):
if (father.gender != Animal.MALE):
raise serializers.ValidationError('The father has to be male.')
def validate_mother(self, mother):
if (mother.gender != Animal.FEMALE):
raise serializers.ValidationError('The mother has to be female.')
def validate_dob(self, dob):
father_id = self.context['request'].data['father']
if (father_id):
father = Animal.objects.get(pk = father_id)
if (father and father.dob > dob):
raise serializers.ValidationError('Animal can\'t be older than it\'s father')
mother_id = self.context['request'].data['mother']
if (mother_id):
mother = Animal.objects.get(pk = mother_id)
if (mother and mother.dob > dob):
raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
|
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
|
Add validator that selected father is male and mother is female.
Validate that the animal is younger than it's parents.
|
Python
|
apache-2.0
|
mmlado/animal_pairing,mmlado/animal_pairing
|
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
- fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
+ fields = ('id', 'name', 'dob', 'gender',
+ 'active', 'own', 'father', 'mother')
+
+ def validate_father(self, father):
+ if (father.gender != Animal.MALE):
+ raise serializers.ValidationError('The father has to be male.')
+
+ def validate_mother(self, mother):
+ if (mother.gender != Animal.FEMALE):
+ raise serializers.ValidationError('The mother has to be female.')
+
+ def validate_dob(self, dob):
+ father_id = self.context['request'].data['father']
+ if (father_id):
+ father = Animal.objects.get(pk = father_id)
+ if (father and father.dob > dob):
+ raise serializers.ValidationError('Animal can\'t be older than it\'s father')
+
+ mother_id = self.context['request'].data['mother']
+ if (mother_id):
+ mother = Animal.objects.get(pk = mother_id)
+ if (mother and mother.dob > dob):
+ raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
+
|
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
|
## Code Before:
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
## Instruction:
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
## Code After:
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
fields = ('id', 'name', 'dob', 'gender',
'active', 'own', 'father', 'mother')
def validate_father(self, father):
if (father.gender != Animal.MALE):
raise serializers.ValidationError('The father has to be male.')
def validate_mother(self, mother):
if (mother.gender != Animal.FEMALE):
raise serializers.ValidationError('The mother has to be female.')
def validate_dob(self, dob):
father_id = self.context['request'].data['father']
if (father_id):
father = Animal.objects.get(pk = father_id)
if (father and father.dob > dob):
raise serializers.ValidationError('Animal can\'t be older than it\'s father')
mother_id = self.context['request'].data['mother']
if (mother_id):
mother = Animal.objects.get(pk = mother_id)
if (mother and mother.dob > dob):
raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
|
from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.ModelSerializer):
class Meta:
model = Animal
- fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
+ fields = ('id', 'name', 'dob', 'gender',
+ 'active', 'own', 'father', 'mother')
+
+ def validate_father(self, father):
+ if (father.gender != Animal.MALE):
+ raise serializers.ValidationError('The father has to be male.')
+
+ def validate_mother(self, mother):
+ if (mother.gender != Animal.FEMALE):
+ raise serializers.ValidationError('The mother has to be female.')
+
+ def validate_dob(self, dob):
+ father_id = self.context['request'].data['father']
+ if (father_id):
+ father = Animal.objects.get(pk = father_id)
+ if (father and father.dob > dob):
+ raise serializers.ValidationError('Animal can\'t be older than it\'s father')
+
+ mother_id = self.context['request'].data['mother']
+ if (mother_id):
+ mother = Animal.objects.get(pk = mother_id)
+ if (mother and mother.dob > dob):
+ raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
|
91f250485b86339b13f5a073e5879292525f9015
|
nbparameterise/code_drivers/python3.py
|
nbparameterise/code_drivers/python3.py
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
Add lists as valid parameters
|
Add lists as valid parameters
|
Python
|
mit
|
takluyver/nbparameterise
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
+ def check_list(node):
+ def bool_check(node):
+ return isinstance(node, ast.NameConstant) and (node.value in (True, False))
+ return all([(isinstance(n, (ast.Num, ast.Str))
+ or bool_check(n)) for n in node.elts])
+
def check_fillable_node(node, path):
- if isinstance(node, (ast.Num, ast.Str, ast.List)):
+ if isinstance(node, (ast.Num, ast.Str)):
+ return
+ elif (isinstance(node, ast.List)
+ and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
-
+
- raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
+ raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
- elif isisntance(node, ast.List):
+ elif isinstance(node, ast.List):
- return list, node.s
+ return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
+
+
|
Add lists as valid parameters
|
## Code Before:
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
## Instruction:
Add lists as valid parameters
## Code After:
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
+ def check_list(node):
+ def bool_check(node):
+ return isinstance(node, ast.NameConstant) and (node.value in (True, False))
+ return all([(isinstance(n, (ast.Num, ast.Str))
+ or bool_check(n)) for n in node.elts])
+
def check_fillable_node(node, path):
- if isinstance(node, (ast.Num, ast.Str, ast.List)):
? ----------
+ if isinstance(node, (ast.Num, ast.Str)):
+ return
+ elif (isinstance(node, ast.List)
+ and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
-
+
- raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
+ raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
? ++++++
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
- elif isisntance(node, ast.List):
? -
+ elif isinstance(node, ast.List):
? +
- return list, node.s
+ return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
+
|
b824e4c4a106d73c842a38758addde52d94e976a
|
ngrams_feature_extractor.py
|
ngrams_feature_extractor.py
|
import sklearn
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
|
import sklearn
from hdf5_getters import *
import os
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
for root, dirs, files in os.walk('data'):
files = glob.glob(os.path.join(root, '*h5'))
for f in files:
train_pair = make_train_pair(f)
titles.append(train_pair['title'])
pitch_diff_list.append(train_pair['pitch_diffs'])
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
|
Add corresponding hdf5 parsing file
|
Add corresponding hdf5 parsing file
|
Python
|
mit
|
ajnam12/MusicNLP
|
import sklearn
-
+ from hdf5_getters import *
+ import os
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
+
+ for root, dirs, files in os.walk('data'):
+ files = glob.glob(os.path.join(root, '*h5'))
+ for f in files:
+ train_pair = make_train_pair(f)
+ titles.append(train_pair['title'])
+ pitch_diff_list.append(train_pair['pitch_diffs'])
+
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
|
Add corresponding hdf5 parsing file
|
## Code Before:
import sklearn
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
## Instruction:
Add corresponding hdf5 parsing file
## Code After:
import sklearn
from hdf5_getters import *
import os
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
for root, dirs, files in os.walk('data'):
files = glob.glob(os.path.join(root, '*h5'))
for f in files:
train_pair = make_train_pair(f)
titles.append(train_pair['title'])
pitch_diff_list.append(train_pair['pitch_diffs'])
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
|
import sklearn
-
+ from hdf5_getters import *
+ import os
def make_train_pair(filename):
h5 = open_h5_file_read(filename)
title = get_title(h5)
pitches = get_segments_pitches(h5)[:11] # limit: only look at beginning
pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
h5.close()
return {'title': title, 'pitch_diffs': pitch_diffs}
+
+ for root, dirs, files in os.walk('data'):
+ files = glob.glob(os.path.join(root, '*h5'))
+ for f in files:
+ train_pair = make_train_pair(f)
+ titles.append(train_pair['title'])
+ pitch_diff_list.append(train_pair['pitch_diffs'])
+
# some lines omitted
neigh = NearestNeighbors(n_neighbors=1) # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029])) # example prediction
|
93623d3bc8336073b65f586e2d1573831c492084
|
iatidataquality/__init__.py
|
iatidataquality/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
import survey
|
Add survey controller to routes
|
Add survey controller to routes
|
Python
|
agpl-3.0
|
pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
+ import survey
|
Add survey controller to routes
|
## Code Before:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
## Instruction:
Add survey controller to routes
## Code After:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
import survey
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__.split('.')[0])
app.config.from_pyfile('../config.py')
db = SQLAlchemy(app)
import api
import routes
import publishers
import publisher_conditions
import tests
import organisations
import organisations_feedback
import registry
import packages
import indicators
import aggregationtypes
+ import survey
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.