commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6d291571dca59243c0a92f9955776e1acd2e87da
|
falmer/content/queries.py
|
falmer/content/queries.py
|
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
raise GraphQLError(f'404: Page not found for {path}')
def resolve_all_pages(self, info):
return Page.objects.specific().live()
|
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
return None
def resolve_all_pages(self, info):
return Page.objects.specific().live()
|
Return empty result rather than graphql error
|
Return empty result rather than graphql error
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
- raise GraphQLError(f'404: Page not found for {path}')
+ return None
def resolve_all_pages(self, info):
return Page.objects.specific().live()
|
Return empty result rather than graphql error
|
## Code Before:
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
raise GraphQLError(f'404: Page not found for {path}')
def resolve_all_pages(self, info):
return Page.objects.specific().live()
## Instruction:
Return empty result rather than graphql error
## Code After:
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
return None
def resolve_all_pages(self, info):
return Page.objects.specific().live()
|
import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
- raise GraphQLError(f'404: Page not found for {path}')
+ return None
def resolve_all_pages(self, info):
return Page.objects.specific().live()
|
2aab542cc74fdc0cf060518241f01fd74d91ecb5
|
byceps/services/user/transfer/models.py
|
byceps/services/user/transfer/models.py
|
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
|
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
Fix type hint for avatar URL in user DTO
|
Fix type hint for avatar URL in user DTO
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps
|
+
+ from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
- avatar_url: str
+ avatar_url: Optional[str]
is_orga: bool
|
Fix type hint for avatar URL in user DTO
|
## Code Before:
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
## Instruction:
Fix type hint for avatar URL in user DTO
## Code After:
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
+
+ from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
- avatar_url: str
+ avatar_url: Optional[str]
? +++++++++ +
is_orga: bool
|
5b038b468af0f5a060eaea3bd2956ff85ad09071
|
conman/redirects/views.py
|
conman/redirects/views.py
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, route, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
self.permanent = route.permanent
return route.target.url
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, route, **kwargs):
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
self.permanent = route.permanent
return route.target
|
Use explicit kwarg over kwargs dictionary access
|
Use explicit kwarg over kwargs dictionary access
|
Python
|
bsd-2-clause
|
meshy/django-conman,meshy/django-conman,Ian-Foote/django-conman
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
- def get_redirect_url(self, *args, **kwargs):
+ def get_redirect_url(self, *args, route, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
- redirect = kwargs['route']
- self.permanent = redirect.permanent
+ self.permanent = route.permanent
- return redirect.target.url
+ return route.target.url
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
- def get_redirect_url(self, *args, **kwargs):
+ def get_redirect_url(self, *args, route, **kwargs):
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
- redirect = kwargs['route']
- self.permanent = redirect.permanent
+ self.permanent = route.permanent
- return redirect.target
+ return route.target
|
Use explicit kwarg over kwargs dictionary access
|
## Code Before:
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target
## Instruction:
Use explicit kwarg over kwargs dictionary access
## Code After:
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, route, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
self.permanent = route.permanent
return route.target.url
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, route, **kwargs):
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
self.permanent = route.permanent
return route.target
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
- def get_redirect_url(self, *args, **kwargs):
+ def get_redirect_url(self, *args, route, **kwargs):
? +++++++
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
- redirect = kwargs['route']
- self.permanent = redirect.permanent
? ------
+ self.permanent = route.permanent
? +++
- return redirect.target.url
? ------
+ return route.target.url
? +++
class URLRedirectView(RedirectView):
"""Redirect to a URLRedirect Route's target URL."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
- def get_redirect_url(self, *args, **kwargs):
+ def get_redirect_url(self, *args, route, **kwargs):
? +++++++
"""
Return the target url.
Save the route's redirect type for use by RedirectView.
"""
- redirect = kwargs['route']
- self.permanent = redirect.permanent
? ------
+ self.permanent = route.permanent
? +++
- return redirect.target
? ------
+ return route.target
? +++
|
7848338fd8c1a73c8371617fc4b72a139380cc50
|
blaze/expr/tests/test_strings.py
|
blaze/expr/tests/test_strings.py
|
import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
|
import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
|
Test for new like expression
|
Test for new like expression
|
Python
|
bsd-3-clause
|
ContinuumIO/blaze,cpcloud/blaze,ContinuumIO/blaze,cpcloud/blaze,cowlicks/blaze,cowlicks/blaze
|
import datashape
- from blaze.expr import TableSymbol, like, Like
+ import pytest
+ from datashape import dshape
+
+ from blaze import symbol
+ @pytest.mark.parametrize(
+ 'ds',
+ [
+ 'var * {name: string}',
+ 'var * {name: ?string}',
+ 'var * string',
+ 'var * ?string',
+ 'string',
+ ]
+ )
- def test_like():
+ def test_like(ds):
- t = TableSymbol('t', '{name: string, amount: int, city: string}')
+ t = symbol('t', ds)
+ expr = getattr(t, 'name', t).like('Alice*')
+ assert expr.pattern == 'Alice*'
+ assert expr.schema.measure == dshape(
+ '%sbool' % ('?' if '?' in ds else '')
+ ).measure
- expr = like(t, name='Alice*')
-
- assert eval(str(expr)).isidentical(expr)
- assert expr.schema == t.schema
- assert expr.dshape[0] == datashape.var
-
|
Test for new like expression
|
## Code Before:
import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
## Instruction:
Test for new like expression
## Code After:
import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
|
import datashape
- from blaze.expr import TableSymbol, like, Like
+ import pytest
+ from datashape import dshape
+
+ from blaze import symbol
+ @pytest.mark.parametrize(
+ 'ds',
+ [
+ 'var * {name: string}',
+ 'var * {name: ?string}',
+ 'var * string',
+ 'var * ?string',
+ 'string',
+ ]
+ )
- def test_like():
+ def test_like(ds):
? ++
+ t = symbol('t', ds)
+ expr = getattr(t, 'name', t).like('Alice*')
+ assert expr.pattern == 'Alice*'
- t = TableSymbol('t', '{name: string, amount: int, city: string}')
-
- expr = like(t, name='Alice*')
-
- assert eval(str(expr)).isidentical(expr)
- assert expr.schema == t.schema
? ^^ - ^^
+ assert expr.schema.measure == dshape(
? ++++++++ ^ ++ ^
- assert expr.dshape[0] == datashape.var
+ '%sbool' % ('?' if '?' in ds else '')
+ ).measure
|
a42997458baa1c6a1648896ff50f44e79525f8a1
|
ognskylines/commands/devices/insert.py
|
ognskylines/commands/devices/insert.py
|
from ognskylines.dbutils import session
from ognskylines.model import Device
from ogn.utils import get_ddb, get_trackable
from manager import Manager
manager = Manager()
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
devices = get_trackable(get_ddb())
for ogn_address in devices:
device = Device(ogn_address=ogn_address[3:])
session.add(device)
session.commit()
print("Imported {} devices.".format(len(devices)))
|
from ognskylines.dbutils import session
from ognskylines.model import Device
import requests
from manager import Manager
manager = Manager()
DDB_URL = "http://ddb.glidernet.org/download/?j=1"
def get_ddb():
devices = requests.get(DDB_URL).json()
for device in devices['devices']:
device.update({'identified': device['identified'] == 'Y',
'tracked': device['tracked'] == 'Y'})
yield device
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
for device in get_ddb():
if device['identified'] and device['tracked']:
session.add(Device(ogn_address=device['device_id']))
session.commit()
print("Imported {} devices.".format(session.query(Device).count()))
|
Add function to fetch devices from the DDB
|
Add function to fetch devices from the DDB
|
Python
|
agpl-3.0
|
kerel-fs/ogn-skylines-gateway,kerel-fs/ogn-skylines-gateway
|
from ognskylines.dbutils import session
from ognskylines.model import Device
+ import requests
- from ogn.utils import get_ddb, get_trackable
-
from manager import Manager
manager = Manager()
+
+
+ DDB_URL = "http://ddb.glidernet.org/download/?j=1"
+
+
+ def get_ddb():
+ devices = requests.get(DDB_URL).json()
+ for device in devices['devices']:
+ device.update({'identified': device['identified'] == 'Y',
+ 'tracked': device['tracked'] == 'Y'})
+ yield device
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
- devices = get_trackable(get_ddb())
- for ogn_address in devices:
- device = Device(ogn_address=ogn_address[3:])
- session.add(device)
+ for device in get_ddb():
+ if device['identified'] and device['tracked']:
+ session.add(Device(ogn_address=device['device_id']))
+
session.commit()
- print("Imported {} devices.".format(len(devices)))
+ print("Imported {} devices.".format(session.query(Device).count()))
|
Add function to fetch devices from the DDB
|
## Code Before:
from ognskylines.dbutils import session
from ognskylines.model import Device
from ogn.utils import get_ddb, get_trackable
from manager import Manager
manager = Manager()
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
devices = get_trackable(get_ddb())
for ogn_address in devices:
device = Device(ogn_address=ogn_address[3:])
session.add(device)
session.commit()
print("Imported {} devices.".format(len(devices)))
## Instruction:
Add function to fetch devices from the DDB
## Code After:
from ognskylines.dbutils import session
from ognskylines.model import Device
import requests
from manager import Manager
manager = Manager()
DDB_URL = "http://ddb.glidernet.org/download/?j=1"
def get_ddb():
devices = requests.get(DDB_URL).json()
for device in devices['devices']:
device.update({'identified': device['identified'] == 'Y',
'tracked': device['tracked'] == 'Y'})
yield device
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
for device in get_ddb():
if device['identified'] and device['tracked']:
session.add(Device(ogn_address=device['device_id']))
session.commit()
print("Imported {} devices.".format(session.query(Device).count()))
|
from ognskylines.dbutils import session
from ognskylines.model import Device
+ import requests
- from ogn.utils import get_ddb, get_trackable
-
from manager import Manager
manager = Manager()
+
+
+ DDB_URL = "http://ddb.glidernet.org/download/?j=1"
+
+
+ def get_ddb():
+ devices = requests.get(DDB_URL).json()
+ for device in devices['devices']:
+ device.update({'identified': device['identified'] == 'Y',
+ 'tracked': device['tracked'] == 'Y'})
+ yield device
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
- devices = get_trackable(get_ddb())
- for ogn_address in devices:
- device = Device(ogn_address=ogn_address[3:])
- session.add(device)
+ for device in get_ddb():
+ if device['identified'] and device['tracked']:
+ session.add(Device(ogn_address=device['device_id']))
+
session.commit()
- print("Imported {} devices.".format(len(devices)))
? ^ ^ ^
+ print("Imported {} devices.".format(session.query(Device).count()))
? ^ ++++ ++++++ ^ ^^^^^^^^
|
cc43b4f14706027c0bd0c15e1467d5df586faff8
|
shoop/front/apps/simple_order_notification/templates.py
|
shoop/front/apps/simple_order_notification/templates.py
|
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
{%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
Remove price check from order email template
|
Remove price check from order email template
For some reason, only lines with prices were rendered in the email. Changed
this so that the free lines (from campaigns) are shown also.
No ref
|
Python
|
agpl-3.0
|
shawnadelic/shuup,shawnadelic/shuup,suutari/shoop,suutari-ai/shoop,suutari/shoop,hrayr-artunyan/shuup,shoopio/shoop,suutari/shoop,shoopio/shoop,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,hrayr-artunyan/shuup,shawnadelic/shuup
|
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
- {%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
- {% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
Remove price check from order email template
|
## Code Before:
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
{%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
## Instruction:
Remove price check from order email template
## Code After:
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
- {%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
- {% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
dbbbb763db09fdf777558b781a29d4b8a71b3e62
|
src/DeepLearn/venv/Lab/Array.py
|
src/DeepLearn/venv/Lab/Array.py
|
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
print(np.dot(arr3,arr4))
# print(arr1*arr2)
|
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
print(np.dot(arr3,arr4))
print(1*7+2*7)
# print(arr1*arr2)
|
Create 3 layer nt sample
|
Create 3 layer nt sample
|
Python
|
mit
|
KarateJB/Python.Practice,KarateJB/Python.Practice,KarateJB/Python.Practice,KarateJB/Python.Practice
|
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
- print(np.dot(arr3,arr4))
+ print(np.dot(arr3,arr4))
+
+ print(1*7+2*7)
# print(arr1*arr2)
|
Create 3 layer nt sample
|
## Code Before:
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
print(np.dot(arr3,arr4))
# print(arr1*arr2)
## Instruction:
Create 3 layer nt sample
## Code After:
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
print(np.dot(arr3,arr4))
print(1*7+2*7)
# print(arr1*arr2)
|
import numpy as np
arr1 = np.array([[1,2],[3,4]])
arr2 = np.array([[5,6],[7,8]])
arr3 = np.array([[1,2],[3,4],[5,6]])
arr4 = np.array([7,8])
# 列出矩陣的維度
# print(arr1.shape)
# print(arr1.shape[0])
# 矩陣乘積
# print(np.dot(arr1,arr2))
- print(np.dot(arr3,arr4))
+ print(np.dot(arr3,arr4))
? +
+
+ print(1*7+2*7)
# print(arr1*arr2)
|
dfd4a6f6b23447538b2b22da11666f5218d791db
|
mots_vides/constants.py
|
mots_vides/constants.py
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
|
Define a complete list of language code, for easy future maintenance
|
Define a complete list of language code, for easy future maintenance
|
Python
|
bsd-3-clause
|
Fantomas42/mots-vides,Fantomas42/mots-vides
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
+ LANGUAGE_CODES = {
+ 'af': 'afrikaans',
+ 'ar': 'arabic',
+ 'az': 'azerbaijani',
+ 'bg': 'bulgarian',
+ 'be': 'belarusian',
+ 'bn': 'bengali',
+ 'br': 'breton',
+ 'bs': 'bosnian',
+ 'ca': 'catalan',
+ 'cs': 'czech',
+ 'cy': 'welsh',
+ 'da': 'danish',
+ 'de': 'german',
+ 'el': 'greek',
+ 'en': 'english',
+ 'eo': 'esperanto',
+ 'es': 'spanish',
+ 'et': 'estonian',
+ 'eu': 'basque',
+ 'fa': 'persian',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'fy': 'frisian',
+ 'ga': 'irish',
+ 'gl': 'galician',
+ 'he': 'hebrew',
+ 'hi': 'hindi',
+ 'hr': 'croatian',
+ 'hu': 'hungarian',
+ 'ia': 'interlingua',
+ 'id': 'indonesian',
+ 'io': 'ido',
+ 'is': 'icelandic',
+ 'it': 'italian',
+ 'ja': 'japanese',
+ 'ka': 'georgian',
+ 'kk': 'kazakh',
+ 'km': 'khmer',
+ 'kn': 'kannada',
+ 'ko': 'korean',
+ 'lb': 'luxembourgish',
+ 'lt': 'lithuanian',
+ 'lv': 'latvian',
+ 'mk': 'macedonian',
+ 'ml': 'malayalam',
+ 'mn': 'mongolian',
+ 'mr': 'marathi',
+ 'my': 'burmese',
+ 'nb': 'norwegian',
+ 'ne': 'nepali',
+ 'nl': 'dutch',
+ 'os': 'ossetic',
+ 'pa': 'punjabi',
+ 'pl': 'polish',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'sk': 'slovak',
+ 'sl': 'slovenian',
+ 'sq': 'albanian',
+ 'sr': 'serbian',
+ 'sv': 'swedish',
+ 'sw': 'swahili',
+ 'ta': 'tamil',
+ 'te': 'telugu',
+ 'th': 'thai',
+ 'tr': 'turkish',
+ 'tt': 'tatar',
+ 'uk': 'ukrainian',
+ 'ur': 'urdu',
+ 'vi': 'vietnamese',
+ 'zh': 'chinese',
+ }
+
|
Define a complete list of language code, for easy future maintenance
|
## Code Before:
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
## Instruction:
Define a complete list of language code, for easy future maintenance
## Code After:
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
+
+ LANGUAGE_CODES = {
+ 'af': 'afrikaans',
+ 'ar': 'arabic',
+ 'az': 'azerbaijani',
+ 'bg': 'bulgarian',
+ 'be': 'belarusian',
+ 'bn': 'bengali',
+ 'br': 'breton',
+ 'bs': 'bosnian',
+ 'ca': 'catalan',
+ 'cs': 'czech',
+ 'cy': 'welsh',
+ 'da': 'danish',
+ 'de': 'german',
+ 'el': 'greek',
+ 'en': 'english',
+ 'eo': 'esperanto',
+ 'es': 'spanish',
+ 'et': 'estonian',
+ 'eu': 'basque',
+ 'fa': 'persian',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'fy': 'frisian',
+ 'ga': 'irish',
+ 'gl': 'galician',
+ 'he': 'hebrew',
+ 'hi': 'hindi',
+ 'hr': 'croatian',
+ 'hu': 'hungarian',
+ 'ia': 'interlingua',
+ 'id': 'indonesian',
+ 'io': 'ido',
+ 'is': 'icelandic',
+ 'it': 'italian',
+ 'ja': 'japanese',
+ 'ka': 'georgian',
+ 'kk': 'kazakh',
+ 'km': 'khmer',
+ 'kn': 'kannada',
+ 'ko': 'korean',
+ 'lb': 'luxembourgish',
+ 'lt': 'lithuanian',
+ 'lv': 'latvian',
+ 'mk': 'macedonian',
+ 'ml': 'malayalam',
+ 'mn': 'mongolian',
+ 'mr': 'marathi',
+ 'my': 'burmese',
+ 'nb': 'norwegian',
+ 'ne': 'nepali',
+ 'nl': 'dutch',
+ 'os': 'ossetic',
+ 'pa': 'punjabi',
+ 'pl': 'polish',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'sk': 'slovak',
+ 'sl': 'slovenian',
+ 'sq': 'albanian',
+ 'sr': 'serbian',
+ 'sv': 'swedish',
+ 'sw': 'swahili',
+ 'ta': 'tamil',
+ 'te': 'telugu',
+ 'th': 'thai',
+ 'tr': 'turkish',
+ 'tt': 'tatar',
+ 'uk': 'ukrainian',
+ 'ur': 'urdu',
+ 'vi': 'vietnamese',
+ 'zh': 'chinese',
+ }
|
8cd859609a8a58474ff152d9adbb968ab3cdffa0
|
gaphor/diagram/diagramtools/tests/test_txtool.py
|
gaphor/diagram/diagramtools/tests/test_txtool.py
|
from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
|
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
Fix tests for tx tool
|
Fix tests for tx tool
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
- from unittest.mock import Mock
-
from gi.repository import Gtk
- from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
+ from gaphor.diagram.diagramtools.txtool import (
+ TxData,
+ on_begin,
+ on_end,
+ transactional_tool,
+ )
from gaphor.transaction import TransactionBegin
+ class MockEventManager:
+ def __init__(self):
+ self.events = []
+
+ def handle(self, event):
+ self.events.append(event)
+
+
- def xtest_start_tx_on_begin(view, event_manager):
+ def test_start_tx_on_begin(view):
- event_manager.handle = Mock()
+ event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
+ assert tx_data.tx
+ on_end(tool, None, tx_data)
- assert tx_data.tx
- assert event_manager.handle.called
- assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
+ assert event_manager.events
+ assert isinstance(event_manager.events[0], TransactionBegin)
+
|
Fix tests for tx tool
|
## Code Before:
from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
## Instruction:
Fix tests for tx tool
## Code After:
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
- from unittest.mock import Mock
-
from gi.repository import Gtk
- from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
+ from gaphor.diagram.diagramtools.txtool import (
+ TxData,
+ on_begin,
+ on_end,
+ transactional_tool,
+ )
from gaphor.transaction import TransactionBegin
+ class MockEventManager:
+ def __init__(self):
+ self.events = []
+
+ def handle(self, event):
+ self.events.append(event)
+
+
- def xtest_start_tx_on_begin(view, event_manager):
? - ---------------
+ def test_start_tx_on_begin(view):
- event_manager.handle = Mock()
+ event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
+ assert tx_data.tx
- assert tx_data.tx
+ on_end(tool, None, tx_data)
+
- assert event_manager.handle.called
? ^^ ^^^^^^^^^^
+ assert event_manager.events
? ^^^ ^^
- assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
? ^^ ^^^^^^^^^^^^^^^^^
+ assert isinstance(event_manager.events[0], TransactionBegin)
? ^^^ ^
|
7cac8f8ba591315d68e223503c4e93f976c8d89d
|
characters/views.py
|
characters/views.py
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
race = Race.objects.get(id=1)
cclass = Class.objects.get(id=1)
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race=race,
cclass=cclass
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
Set default race and class without extra database queries
|
Set default race and class without extra database queries
|
Python
|
mit
|
mpirnat/django-tutorial-v2
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
- race = Race.objects.get(id=1)
- cclass = Class.objects.get(id=1)
-
character = Character(
name=request.POST['name'],
background=request.POST['background'],
- race=race,
+ race_id=1,
- cclass=cclass
+ cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
Set default race and class without extra database queries
|
## Code Before:
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
race = Race.objects.get(id=1)
cclass = Class.objects.get(id=1)
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race=race,
cclass=cclass
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
## Instruction:
Set default race and class without extra database queries
## Code After:
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
- race = Race.objects.get(id=1)
- cclass = Class.objects.get(id=1)
-
character = Character(
name=request.POST['name'],
background=request.POST['background'],
- race=race,
? ^^^^
+ race_id=1,
? +++ ^
- cclass=cclass
? ^^^^^^
+ cclass_id=1
? +++ ^
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
308f9d8e1d4083bb7cc6bca0cf021118502d141b
|
marble/common.py
|
marble/common.py
|
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
for cl,composition in classes.iteritems()}
for au, dist_au in distribution.iteritems()}
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
|
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
try:
new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
for cl,composition in classes.iteritems()}
for au, dist_au in distribution.iteritems()}
except KeyError:
raise KeyError("Verify that the categories specified in the class"
" definitions exist in the original data.")
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
|
Raise exception if faulty definition of classes inserted
|
Raise exception if faulty definition of classes inserted
|
Python
|
bsd-3-clause
|
scities/marble,walkerke/marble
|
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
+
+ try:
- new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
+ new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
- for cl,composition in classes.iteritems()}
+ for cl,composition in classes.iteritems()}
- for au, dist_au in distribution.iteritems()}
+ for au, dist_au in distribution.iteritems()}
+
+ except KeyError:
+ raise KeyError("Verify that the categories specified in the class"
+ " definitions exist in the original data.")
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
|
Raise exception if faulty definition of classes inserted
|
## Code Before:
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
for cl,composition in classes.iteritems()}
for au, dist_au in distribution.iteritems()}
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
## Instruction:
Raise exception if faulty definition of classes inserted
## Code After:
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
try:
new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
for cl,composition in classes.iteritems()}
for au, dist_au in distribution.iteritems()}
except KeyError:
raise KeyError("Verify that the categories specified in the class"
" definitions exist in the original data.")
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
|
def compute_totals(distribution, classes):
"Compute the number of individuals per class, per unit and in total"
N_unit = {au:sum([distribution[au][cl] for cl in classes]) for au in distribution}
N_class = {cl:sum([dist_a[cl] for dist_a in distribution.values()]) for cl in classes}
N_tot = sum(N_class.values())
return N_unit, N_class, N_tot
def regroup_per_class(distribution, classes):
"Return classes as they are presented in the data"
+
+ try:
- new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
+ new_distribution = {au: {cl: sum([dist_au[c] for c in composition])
? ++++
- for cl,composition in classes.iteritems()}
+ for cl,composition in classes.iteritems()}
? ++++
- for au, dist_au in distribution.iteritems()}
+ for au, dist_au in distribution.iteritems()}
? ++++
+
+ except KeyError:
+ raise KeyError("Verify that the categories specified in the class"
+ " definitions exist in the original data.")
return new_distribution
def return_categories(distribution):
"Return the categories in the original data"
keys = next(distribution.itervalues()).keys()
return {k:[k] for k in keys}
|
76ed0bb6415209aa28350d4304e7b87715ba37f5
|
qllr/templating.py
|
qllr/templating.py
|
import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
Make templates to return relative path
|
Make templates to return relative path
|
Python
|
agpl-3.0
|
em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating,em92/pickup-rating,em92/quakelive-local-ratings
|
import typing
+ from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
+ # NOTE: take this stupid hack away, when url_for returns relative path
- return request.url_for(name, **path_params)
+ absolute_url = request.url_for(name, **path_params)
+ parsed_absolute_url = urlparse(absolute_url)
+ return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
Make templates to return relative path
|
## Code Before:
import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
## Instruction:
Make templates to return relative path
## Code After:
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
import typing
+ from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
+ # NOTE: take this stupid hack away, when url_for returns relative path
- return request.url_for(name, **path_params)
? ^ ^ ^
+ absolute_url = request.url_for(name, **path_params)
? ^^^^^^^ ^ ^^^
+ parsed_absolute_url = urlparse(absolute_url)
+ return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
d19fd61e746ab4afcb534df6be933716e154b715
|
memopol/search/templatetags/search_tags.py
|
memopol/search/templatetags/search_tags.py
|
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
def simple_search_shortcut(search_string):
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
return "%s?q=%s" % (base_url, urlencode(search_string))
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
|
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
def simple_search_shortcut(search_string, sort=None):
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
query_string = "q=%s" % urlencode(search_string)
if sort:
query_string = "%s&sort=%s" % (query_string, sort)
return "%s?%s" % (base_url, query_string)
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
|
Make possible to define a sort in simple_search_shortcut tt
|
[enh] Make possible to define a sort in simple_search_shortcut tt
|
Python
|
agpl-3.0
|
yohanboniface/memopol-core,yohanboniface/memopol-core,yohanboniface/memopol-core
|
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
- def simple_search_shortcut(search_string):
+ def simple_search_shortcut(search_string, sort=None):
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
+ query_string = "q=%s" % urlencode(search_string)
+ if sort:
+ query_string = "%s&sort=%s" % (query_string, sort)
- return "%s?q=%s" % (base_url, urlencode(search_string))
+ return "%s?%s" % (base_url, query_string)
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
|
Make possible to define a sort in simple_search_shortcut tt
|
## Code Before:
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
def simple_search_shortcut(search_string):
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
return "%s?q=%s" % (base_url, urlencode(search_string))
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
## Instruction:
Make possible to define a sort in simple_search_shortcut tt
## Code After:
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
def simple_search_shortcut(search_string, sort=None):
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
query_string = "q=%s" % urlencode(search_string)
if sort:
query_string = "%s&sort=%s" % (query_string, sort)
return "%s?%s" % (base_url, query_string)
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
|
from django.core.urlresolvers import reverse
from django.template.defaultfilters import urlencode
from django import template
from dynamiq.utils import get_advanced_search_formset_class
from ..forms import MEPSearchForm, MEPSearchAdvancedFormset
register = template.Library()
@register.simple_tag
- def simple_search_shortcut(search_string):
+ def simple_search_shortcut(search_string, sort=None):
? +++++++++++
"""
Return a simple search URL from a search string, like "daniel OR country:CZ".
"""
base_url = reverse("search")
+ query_string = "q=%s" % urlencode(search_string)
+ if sort:
+ query_string = "%s&sort=%s" % (query_string, sort)
- return "%s?q=%s" % (base_url, urlencode(search_string))
? -- ^^^^^^^^^^^^^^ -
+ return "%s?%s" % (base_url, query_string)
? + + ^
@register.inclusion_tag('blocks/search_form.html', takes_context=True)
def render_search_form(context):
"""
Display the search form, if a `dynamiq` key is on the context, it will
used, otherwise, it create an empty form
"""
if 'dynamiq' in context:
dynamiq = context['dynamiq']
else:
request = context['request']
formset_class = get_advanced_search_formset_class(request.user, MEPSearchAdvancedFormset, MEPSearchForm)
formset = formset_class(None)
dynamiq = {
"q": "",
"label": "",
"formset": formset,
}
return {
'dynamiq': dynamiq
}
|
96d798685c53f4568edaaf990b0bbe8e2e10e24a
|
tests_tf/test_mnist_tutorial_jsma.py
|
tests_tf/test_mnist_tutorial_jsma.py
|
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
'train_end': 10000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
print(report.clean_train_adv_eval)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
|
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
'train_end': 1000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
|
Update JSMA test tutorial constant
|
Update JSMA test tutorial constant
|
Python
|
mit
|
cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,openai/cleverhans,carlini/cleverhans,cihangxie/cleverhans,cleverhans-lab/cleverhans,carlini/cleverhans,fartashf/cleverhans
|
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
- 'train_end': 10000,
+ 'train_end': 1000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
- print(report.clean_train_adv_eval)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
|
Update JSMA test tutorial constant
|
## Code Before:
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
'train_end': 10000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
print(report.clean_train_adv_eval)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
## Instruction:
Update JSMA test tutorial constant
## Code After:
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
'train_end': 1000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
|
import unittest
class TestMNISTTutorialJSMA(unittest.TestCase):
def test_mnist_tutorial_jsma(self):
from tutorials import mnist_tutorial_jsma
# Run the MNIST tutorial on a dataset of reduced size
# and disable visualization.
jsma_tutorial_args = {'train_start': 0,
- 'train_end': 10000,
? -
+ 'train_end': 1000,
'test_start': 0,
'test_end': 1666,
'viz_enabled': False,
'source_samples': 1,
'nb_epochs': 2}
report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args)
- print(report.clean_train_adv_eval)
# Check accuracy values contained in the AccuracyReport object
self.assertTrue(report.clean_train_clean_eval > 0.75)
self.assertTrue(report.clean_train_adv_eval < 0.05)
# There is no adversarial training in the JSMA tutorial
self.assertTrue(report.adv_train_clean_eval == 0.)
self.assertTrue(report.adv_train_adv_eval == 0.)
if __name__ == '__main__':
unittest.main()
|
5d332259e16758bc43201073db91409390be9134
|
UM/Operations/GroupedOperation.py
|
UM/Operations/GroupedOperation.py
|
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Removes an operation from this group.
def removeOperation(self, index):
del self._children[index]
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
Remove removeOperation from grouped operation
|
Remove removeOperation from grouped operation
This function is never used and actually should never be used. The operation may not be modified after it is used, so removing an operation from the list makes no sense.
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
- ## Removes an operation from this group.
- def removeOperation(self, index):
- del self._children[index]
-
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
Remove removeOperation from grouped operation
|
## Code Before:
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Removes an operation from this group.
def removeOperation(self, index):
del self._children[index]
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
## Instruction:
Remove removeOperation from grouped operation
## Code After:
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
- ## Removes an operation from this group.
- def removeOperation(self, index):
- del self._children[index]
-
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
1513532e473866438ac9dabbfb462e9348a5895e
|
hug/output_format.py
|
hug/output_format.py
|
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
Add the ability for individual objects to define how they would like there data to be outputed for json
|
Add the ability for individual objects to define how they would like there data to be outputed for json
|
Python
|
mit
|
janusnic/hug,yasoob/hug,janusnic/hug,shaunstanislaus/hug,timothycrosley/hug,alisaifee/hug,gbn972/hug,MuhammadAlkarouri/hug,philiptzou/hug,giserh/hug,timothycrosley/hug,STANAPO/hug,shaunstanislaus/hug,STANAPO/hug,origingod/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,alisaifee/hug,giserh/hug,yasoob/hug,gbn972/hug,philiptzou/hug,jean/hug,timothycrosley/hug,jean/hug,origingod/hug
|
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
+ elif getattr(item, '__json__', None):
+ return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
Add the ability for individual objects to define how they would like there data to be outputed for json
|
## Code Before:
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
## Instruction:
Add the ability for individual objects to define how they would like there data to be outputed for json
## Code After:
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
+ elif getattr(item, '__json__', None):
+ return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
c008171b93371c72a2f2a2698f514d267e312837
|
tests/testapp/urls.py
|
tests/testapp/urls.py
|
from django.conf.urls import url
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
url(r"^admin/", admin.site.urls),
url(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
url(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
]
|
from django.urls import re_path
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
re_path(r"^admin/", admin.site.urls),
re_path(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
re_path(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
]
|
Switch from url() to re_path()
|
Switch from url() to re_path()
|
Python
|
bsd-3-clause
|
matthiask/django-content-editor,matthiask/feincms2-content,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/django-content-editor
|
- from django.conf.urls import url
+ from django.urls import re_path
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
- url(r"^admin/", admin.site.urls),
+ re_path(r"^admin/", admin.site.urls),
- url(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
+ re_path(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
- url(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
+ re_path(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
]
|
Switch from url() to re_path()
|
## Code Before:
from django.conf.urls import url
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
url(r"^admin/", admin.site.urls),
url(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
url(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
]
## Instruction:
Switch from url() to re_path()
## Code After:
from django.urls import re_path
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
re_path(r"^admin/", admin.site.urls),
re_path(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
re_path(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
]
|
- from django.conf.urls import url
? ----- - ^
+ from django.urls import re_path
? ^^^^^^
from django.contrib import admin
from .views import ArticleView, PageView
urlpatterns = [
- url(r"^admin/", admin.site.urls),
? - ^
+ re_path(r"^admin/", admin.site.urls),
? ^^^^^^
- url(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
? - ^
+ re_path(r"^articles/(?P<pk>\d+)/$", ArticleView.as_view(), name="article_detail"),
? ^^^^^^
- url(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
? - ^
+ re_path(r"^pages/(?P<pk>\d+)/$", PageView.as_view(), name="page_detail"),
? ^^^^^^
]
|
08652630865a706126ac61420edb55298296d2eb
|
abilian/services/__init__.py
|
abilian/services/__init__.py
|
__all__ = ['Service', 'ServiceState',
'audit_service', 'index_service', 'activity_service', 'auth_service']
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
activity_service = ActivityService()
from .auth import AuthService
auth_service = AuthService()
|
from flask import current_app
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
from .auth import AuthService
__all__ = ['Service', 'ServiceState', 'get_service',
'audit_service', 'index_service', 'activity_service', 'auth_service']
auth_service = AuthService()
activity_service = ActivityService()
def get_service(service):
return current_app.services.get(service)
|
Add a get_service convenience method.
|
Add a get_service convenience method.
|
Python
|
lgpl-2.1
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
+ from flask import current_app
- __all__ = ['Service', 'ServiceState',
- 'audit_service', 'index_service', 'activity_service', 'auth_service']
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
+ from .activity import ActivityService
+ from .auth import AuthService
- from .activity import ActivityService
+ __all__ = ['Service', 'ServiceState', 'get_service',
+ 'audit_service', 'index_service', 'activity_service', 'auth_service']
+
+ auth_service = AuthService()
activity_service = ActivityService()
- from .auth import AuthService
- auth_service = AuthService()
+ def get_service(service):
+ return current_app.services.get(service)
+
|
Add a get_service convenience method.
|
## Code Before:
__all__ = ['Service', 'ServiceState',
'audit_service', 'index_service', 'activity_service', 'auth_service']
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
activity_service = ActivityService()
from .auth import AuthService
auth_service = AuthService()
## Instruction:
Add a get_service convenience method.
## Code After:
from flask import current_app
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
from .auth import AuthService
__all__ = ['Service', 'ServiceState', 'get_service',
'audit_service', 'index_service', 'activity_service', 'auth_service']
auth_service = AuthService()
activity_service = ActivityService()
def get_service(service):
return current_app.services.get(service)
|
+ from flask import current_app
- __all__ = ['Service', 'ServiceState',
- 'audit_service', 'index_service', 'activity_service', 'auth_service']
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
+ from .activity import ActivityService
+ from .auth import AuthService
- from .activity import ActivityService
+ __all__ = ['Service', 'ServiceState', 'get_service',
+ 'audit_service', 'index_service', 'activity_service', 'auth_service']
+
+ auth_service = AuthService()
activity_service = ActivityService()
- from .auth import AuthService
- auth_service = AuthService()
+
+ def get_service(service):
+ return current_app.services.get(service)
|
362827987bb6587e1440f5f3fa804635e426eb5f
|
windpowerlib/__init__.py
|
windpowerlib/__init__.py
|
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from .wind_turbine import (
WindTurbine,
get_turbine_types,
create_power_curve,
) # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
|
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from .wind_turbine import WindTurbine # noqa: F401
from .wind_turbine import get_turbine_types # noqa: F401
from .wind_turbine import create_power_curve # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
|
Use one line per import
|
Use one line per import
|
Python
|
mit
|
wind-python/windpowerlib
|
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
+ from .wind_turbine import WindTurbine # noqa: F401
+ from .wind_turbine import get_turbine_types # noqa: F401
+ from .wind_turbine import create_power_curve # noqa: F401
- from .wind_turbine import (
- WindTurbine,
- get_turbine_types,
- create_power_curve,
- ) # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
|
Use one line per import
|
## Code Before:
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from .wind_turbine import (
WindTurbine,
get_turbine_types,
create_power_curve,
) # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
## Instruction:
Use one line per import
## Code After:
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from .wind_turbine import WindTurbine # noqa: F401
from .wind_turbine import get_turbine_types # noqa: F401
from .wind_turbine import create_power_curve # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
|
__copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
+ from .wind_turbine import WindTurbine # noqa: F401
+ from .wind_turbine import get_turbine_types # noqa: F401
+ from .wind_turbine import create_power_curve # noqa: F401
- from .wind_turbine import (
- WindTurbine,
- get_turbine_types,
- create_power_curve,
- ) # noqa: F401
from .wind_farm import WindFarm # noqa: F401
from .wind_turbine_cluster import WindTurbineCluster # noqa: F401
from .modelchain import ModelChain # noqa: F401
from .turbine_cluster_modelchain import TurbineClusterModelChain # noqa: F401
|
494378256dc5fc6fed290a87afcbcc79b31eb37e
|
linguine/ops/word_cloud_op.py
|
linguine/ops/word_cloud_op.py
|
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
|
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
#sort results by term frequency
results.sort(key=lambda results: results['frequency'], reverse=False)
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
|
Sort term frequency results by frequency
|
Sort term frequency results by frequency
|
Python
|
mit
|
rigatoni/linguine-python,Pastafarians/linguine-python
|
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
+
+ #sort results by term frequency
+ results.sort(key=lambda results: results['frequency'], reverse=False)
+
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
|
Sort term frequency results by frequency
|
## Code Before:
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
## Instruction:
Sort term frequency results by frequency
## Code After:
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
#sort results by term frequency
results.sort(key=lambda results: results['frequency'], reverse=False)
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
|
class WordCloudOp:
def run(self, data):
terms = { }
results = [ ]
try:
for corpus in data:
tokens = corpus.tokenized_contents
for token in tokens:
if token in terms:
terms[token]+=1
else:
terms[token]=1
for term in terms:
results.append({ "term" : term, "frequency" : terms[term]})
+
+ #sort results by term frequency
+ results.sort(key=lambda results: results['frequency'], reverse=False)
+
return results
except LookupError:
raise TransactionException('NLTK \'Punkt\' Model not installed.', 500)
except TypeError:
raise TransactionException('Corpus contents does not exist.')
|
97d2b5b55a6cec3644a323662e52b9b256c18f33
|
mdx_linkify/mdx_linkify.py
|
mdx_linkify/mdx_linkify.py
|
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
Fix IndexError: pop from empty list
|
fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15
|
Python
|
mit
|
daGrevis/mdx_linkify
|
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
- self._linker = Linker(**linker_options)
+ self._linker_options = linker_options
def run(self, text):
+ linker = Linker(**self._linker_options)
- return self._linker.linkify(text)
+ return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
Fix IndexError: pop from empty list
|
## Code Before:
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
## Instruction:
Fix IndexError: pop from empty list
## Code After:
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
- self._linker = Linker(**linker_options)
? --------- -
+ self._linker_options = linker_options
? ++++++++
def run(self, text):
+ linker = Linker(**self._linker_options)
- return self._linker.linkify(text)
? ------
+ return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
cfd2312ae81dd79832d4b03717278a79bc8705d1
|
brte/converters/btf.py
|
brte/converters/btf.py
|
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
'projection_matrix': view_delta['projection_matrix'],
'view_matrix': view_delta['view_matrix'],
}
|
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
def togl(matrix):
return [i for col in matrix.col for i in col]
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
'projection_matrix': togl(view_delta['projection_matrix']),
'view_matrix': togl(view_delta['view_matrix']),
}
|
Fix JSON serialization issue with view and projection matrices
|
Fix JSON serialization issue with view and projection matrices
|
Python
|
mit
|
Kupoman/BlenderRealtimeEngineAddon
|
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
+
+
+ def togl(matrix):
+ return [i for col in matrix.col for i in col]
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
- 'projection_matrix': view_delta['projection_matrix'],
+ 'projection_matrix': togl(view_delta['projection_matrix']),
- 'view_matrix': view_delta['view_matrix'],
+ 'view_matrix': togl(view_delta['view_matrix']),
}
|
Fix JSON serialization issue with view and projection matrices
|
## Code Before:
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
'projection_matrix': view_delta['projection_matrix'],
'view_matrix': view_delta['view_matrix'],
}
## Instruction:
Fix JSON serialization issue with view and projection matrices
## Code After:
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
def togl(matrix):
return [i for col in matrix.col for i in col]
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
'projection_matrix': togl(view_delta['projection_matrix']),
'view_matrix': togl(view_delta['view_matrix']),
}
|
if 'imported' in locals():
import imp
import bpy
imp.reload(blendergltf)
else:
imported = True
from . import blendergltf
import json
import math
import bpy
+
+
+ def togl(matrix):
+ return [i for col in matrix.col for i in col]
class BTFConverter:
def convert(self, add_delta, update_delta, remove_delta, view_delta):
for key, value in update_delta.items():
if value:
add_delta[key] = value
data = blendergltf.export_gltf(add_delta)
self.export_view(view_delta, data)
return data
def export_view(self, view_delta, gltf):
if 'extras' not in gltf:
gltf['extras'] = {}
if 'viewport' in view_delta:
gltf['extras']['view'] = {
'width' : view_delta['viewport'].width,
'height' : view_delta['viewport'].width,
- 'projection_matrix': view_delta['projection_matrix'],
+ 'projection_matrix': togl(view_delta['projection_matrix']),
? +++++ +
- 'view_matrix': view_delta['view_matrix'],
+ 'view_matrix': togl(view_delta['view_matrix']),
? +++++ +
}
|
02160f46d5e28c394915d44c42e4e1b09e750717
|
utils/rest.py
|
utils/rest.py
|
import json
import logging
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('GET %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('DELETE %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('POST %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
|
import json
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
auth = None
if 'username' in config and 'password' in config:
auth = (config['username'], config['password'])
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
auth=auth,
verify=settings.servers.verify_ssl)
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
|
Remove logging and allow anonymous access (for Crucible for example)
|
Remove logging and allow anonymous access (for Crucible for example)
|
Python
|
mit
|
gpailler/AtlassianBot
|
import json
- import logging
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
+ auth = None
+ if 'username' in config and 'password' in config:
+ auth = (config['username'], config['password'])
+
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
- auth=(config['username'], config['password']),
+ auth=auth,
verify=settings.servers.verify_ssl)
-
- logging.debug('GET %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
- logging.debug('DELETE %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
-
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
- logging.debug('POST %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
-
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
|
Remove logging and allow anonymous access (for Crucible for example)
|
## Code Before:
import json
import logging
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('GET %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('DELETE %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
logging.debug('POST %s - Response %s - Data %s'
% (request.url, request.status_code, data))
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
## Instruction:
Remove logging and allow anonymous access (for Crucible for example)
## Code After:
import json
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
auth = None
if 'username' in config and 'password' in config:
auth = (config['username'], config['password'])
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
auth=auth,
verify=settings.servers.verify_ssl)
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
|
import json
- import logging
import requests
import plugins.settings as settings
headers = {'accept': 'application/json'}
def get(config, path, data=None):
+ auth = None
+ if 'username' in config and 'password' in config:
+ auth = (config['username'], config['password'])
+
request = requests.get(
url=__format_url(config, path),
params=data,
headers=headers,
- auth=(config['username'], config['password']),
+ auth=auth,
verify=settings.servers.verify_ssl)
-
- logging.debug('GET %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
return request
def delete(config, path, data):
request = requests.delete(
url=__format_url(config, path),
data=json.dumps(data),
headers={
'Content-type': 'application/json',
'Accept': 'application/json'
},
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
- logging.debug('DELETE %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
-
return request
def post(config, path, data=None):
request = requests.post(
url=__format_url(config, path),
data=data,
headers=headers,
auth=(config['username'], config['password']),
verify=settings.servers.verify_ssl)
- logging.debug('POST %s - Response %s - Data %s'
- % (request.url, request.status_code, data))
-
return request
def __format_url(config, path):
return '{server}{path}'.format(server=config['host'], path=path)
|
ccdb064c0523e9293dca13adefa13d155d372505
|
spotifyconnect/sink.py
|
spotifyconnect/sink.py
|
from __future__ import unicode_literals
import spotifyconnect
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
|
from __future__ import unicode_literals
import spotifyconnect
__all__ = [
'Sink'
]
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
|
Add Sink class to initial spotify-connect import
|
Add Sink class to initial spotify-connect import
|
Python
|
apache-2.0
|
chukysoria/pyspotify-connect,chukysoria/pyspotify-connect
|
from __future__ import unicode_literals
import spotifyconnect
+ __all__ = [
+ 'Sink'
+ ]
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
|
Add Sink class to initial spotify-connect import
|
## Code Before:
from __future__ import unicode_literals
import spotifyconnect
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
## Instruction:
Add Sink class to initial spotify-connect import
## Code After:
from __future__ import unicode_literals
import spotifyconnect
__all__ = [
'Sink'
]
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
|
from __future__ import unicode_literals
import spotifyconnect
+ __all__ = [
+ 'Sink'
+ ]
class Sink(object):
def on(self):
"""Turn on the alsa_sink sink.
This is done automatically when the sink is instantiated, so you'll
only need to call this method if you ever call :meth:`off` and want to
turn the sink back on.
"""
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
spotifyconnect._session_instance.player.on(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
def off(self):
"""Turn off the alsa_sink sink.
This disconnects the sink from the relevant session events.
"""
spotifyconnect._session_instance.player.off(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)
assert spotifyconnect._session_instance.player.num_listeners(
spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0
self._close()
def _on_music_delivery(self, audio_format, frames, num_frames, pending, session):
# This method is called from an internal libspotify thread and must
# not block in any way.
raise NotImplementedError
def _close(self):
pass
|
92595871f908aa22d353a2490f851da23f3d1f64
|
gitcd/Config/FilePersonal.py
|
gitcd/Config/FilePersonal.py
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
# add .gitcd-personal to .gitignore
gitignore = ".gitignore"
if not os.path.isfile(gitignore):
gitignoreContent = self.filename
else:
with open(gitignore, "r") as gitignoreFile:
gitignoreContent = gitignoreFile.read()
# if not yet in gitignore
if "\n%s\n" % (self.filename) not in gitignoreContent:
# add it
gitignoreContent = "%s\n%s\n" % (gitignoreContent, self.filename)
with open(gitignore, "w") as gitignoreFile:
gitignoreFile.write(gitignoreContent)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
Add .gitcd-personal to .gitignore automaticly
|
Add .gitcd-personal to .gitignore automaticly
|
Python
|
apache-2.0
|
claudio-walser/gitcd,claudio-walser/gitcd
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
+ # add .gitcd-personal to .gitignore
+ gitignore = ".gitignore"
+ if not os.path.isfile(gitignore):
+ gitignoreContent = self.filename
+ else:
+ with open(gitignore, "r") as gitignoreFile:
+ gitignoreContent = gitignoreFile.read()
+ # if not yet in gitignore
+ if "\n%s\n" % (self.filename) not in gitignoreContent:
+ # add it
+ gitignoreContent = "%s\n%s\n" % (gitignoreContent, self.filename)
+
+
+ with open(gitignore, "w") as gitignoreFile:
+ gitignoreFile.write(gitignoreContent)
+
+
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
Add .gitcd-personal to .gitignore automaticly
|
## Code Before:
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
## Instruction:
Add .gitcd-personal to .gitignore automaticly
## Code After:
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
# add .gitcd-personal to .gitignore
gitignore = ".gitignore"
if not os.path.isfile(gitignore):
gitignoreContent = self.filename
else:
with open(gitignore, "r") as gitignoreFile:
gitignoreContent = gitignoreFile.read()
# if not yet in gitignore
if "\n%s\n" % (self.filename) not in gitignoreContent:
# add it
gitignoreContent = "%s\n%s\n" % (gitignoreContent, self.filename)
with open(gitignore, "w") as gitignoreFile:
gitignoreFile.write(gitignoreContent)
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
import os
import yaml
from gitcd.Config.Parser import Parser
from gitcd.Config.DefaultsPersonal import DefaultsPersonal
class FilePersonal:
loaded = False
filename = ".gitcd-personal"
parser = Parser()
defaults = DefaultsPersonal()
config = False
def setFilename(self, configFilename: str):
self.filename = configFilename
def load(self):
if not os.path.isfile(self.filename):
self.config = self.defaults.load()
else:
self.config = self.parser.load(self.filename)
def write(self):
self.parser.write(self.filename, self.config)
+ # add .gitcd-personal to .gitignore
+ gitignore = ".gitignore"
+ if not os.path.isfile(gitignore):
+ gitignoreContent = self.filename
+ else:
+ with open(gitignore, "r") as gitignoreFile:
+ gitignoreContent = gitignoreFile.read()
+ # if not yet in gitignore
+ if "\n%s\n" % (self.filename) not in gitignoreContent:
+ # add it
+ gitignoreContent = "%s\n%s\n" % (gitignoreContent, self.filename)
+
+
+ with open(gitignore, "w") as gitignoreFile:
+ gitignoreFile.write(gitignoreContent)
+
+
def getToken(self):
return self.config['token']
def setToken(self, token):
self.config['token'] = token
|
a3ee74b3b7cba17e013b549f0ed56587cfc65331
|
rnacentral/nhmmer/urls.py
|
rnacentral/nhmmer/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
Use spaces instead of tabs
|
Use spaces instead of tabs
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
- url(r'^get-results/?$',
+ url(r'^get-results/?$',
- views.ResultsView.as_view(),
+ views.ResultsView.as_view(),
- name='nhmmer-job-results'),
+ name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
Use spaces instead of tabs
|
## Code Before:
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
## Instruction:
Use spaces instead of tabs
## Code After:
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
- url(r'^get-results/?$',
? ^
+ url(r'^get-results/?$',
? ^^^^
- views.ResultsView.as_view(),
? ^^
+ views.ResultsView.as_view(),
? ^^^^^^^^
- name='nhmmer-job-results'),
? ^^
+ name='nhmmer-job-results'),
? ^^^^^^^^
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
314f387e3a227181926531f5230f21887d35038b
|
uploader/uploader.py
|
uploader/uploader.py
|
import os
import glob
import logging
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
|
import os
import glob
import logging
import subprocess
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
def has_network_conntection(self):
command = ['ping', '-c', '1', '-W', '2', 'www.dropbox.com']
try:
subprocess.check_output(command)
return True
except:
return False
|
Add util to test network connection
|
Add util to test network connection
|
Python
|
mit
|
projectweekend/Pi-Camera-Time-Lapse,projectweekend/Pi-Camera-Time-Lapse
|
import os
import glob
import logging
+ import subprocess
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
+
+ def has_network_conntection(self):
+ command = ['ping', '-c', '1', '-W', '2', 'www.dropbox.com']
+ try:
+ subprocess.check_output(command)
+ return True
+ except:
+ return False
+
|
Add util to test network connection
|
## Code Before:
import os
import glob
import logging
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
## Instruction:
Add util to test network connection
## Code After:
import os
import glob
import logging
import subprocess
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
def has_network_conntection(self):
command = ['ping', '-c', '1', '-W', '2', 'www.dropbox.com']
try:
subprocess.check_output(command)
return True
except:
return False
|
import os
import glob
import logging
+ import subprocess
import dropbox
from dropbox.client import DropboxClient, ErrorResponse
import settings
from settings import DROPBOX_TOKEN_FILE
def load_dropbox_token():
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
return dropbox_token
def has_valid_dropbox_token():
try:
with open(DROPBOX_TOKEN_FILE, 'r') as f:
dropbox_token = f.read()
client = dropbox.client.DropboxClient(dropbox_token)
client.account_info()
except (IOError, ErrorResponse):
return False
return True
def get_files_to_upload():
return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")
def upload_file(path):
access_token = load_dropbox_token()
client = DropboxClient(access_token)
name = path.split("/")[-1]
with open(path, 'rb') as data:
try:
client.put_file(name, data)
except Exception as e:
logging.exception(e)
else:
os.remove(path)
+
+
+ def has_network_conntection(self):
+ command = ['ping', '-c', '1', '-W', '2', 'www.dropbox.com']
+ try:
+ subprocess.check_output(command)
+ return True
+ except:
+ return False
|
ae433a0ed222d3540581b2b49c9a49a8ad16819c
|
wagtailaltgenerator/translation_providers/google_translate.py
|
wagtailaltgenerator/translation_providers/google_translate.py
|
import logging
from . import AbstractTranslationProvider
from google.cloud import translate
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
|
import logging
from . import AbstractTranslationProvider
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
from google.cloud import translate
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
|
Enable test mocking for translate
|
Enable test mocking for translate
|
Python
|
mit
|
marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator
|
import logging
from . import AbstractTranslationProvider
- from google.cloud import translate
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
+ from google.cloud import translate
+
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
|
Enable test mocking for translate
|
## Code Before:
import logging
from . import AbstractTranslationProvider
from google.cloud import translate
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
## Instruction:
Enable test mocking for translate
## Code After:
import logging
from . import AbstractTranslationProvider
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
from google.cloud import translate
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
|
import logging
from . import AbstractTranslationProvider
- from google.cloud import translate
logger = logging.getLogger(__name__)
class GoogleTranslate(AbstractTranslationProvider):
def translate(self, strings, target_language, source_language="en"):
+ from google.cloud import translate
+
client = translate.Client()
response = client.translate(
strings, source_language=source_language, target_language=target_language
)
return list(map(lambda x: x["translatedText"], response))
|
1aab2f41191d3de0b7bade31cdf83ae14be9dc2a
|
Lib/test/test_copy_reg.py
|
Lib/test/test_copy_reg.py
|
import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
|
import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
|
Convert copy_reg test to PyUnit.
|
Convert copy_reg test to PyUnit.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
import copy_reg
+ import test_support
+ import unittest
+
class C:
pass
- try:
- copy_reg.pickle(C, None, None)
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch expected TypeError when registering a class type."
+ class CopyRegTestCase(unittest.TestCase):
+
+ def test_class(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ C, None, None)
+
+ def test_noncallable_reduce(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ type(1), "not a callable")
+
+ def test_noncallable_constructor(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ type(1), int, "not a callable")
+ test_support.run_unittest(CopyRegTestCase)
- print
- try:
- copy_reg.pickle(type(1), "not a callable")
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch TypeError " \
- "when registering a non-callable reduction function."
-
- print
- try:
- copy_reg.pickle(type(1), int, "not a callable")
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch TypeError " \
- "when registering a non-callable constructor."
-
|
Convert copy_reg test to PyUnit.
|
## Code Before:
import copy_reg
class C:
pass
try:
copy_reg.pickle(C, None, None)
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch expected TypeError when registering a class type."
print
try:
copy_reg.pickle(type(1), "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable reduction function."
print
try:
copy_reg.pickle(type(1), int, "not a callable")
except TypeError, e:
print "Caught expected TypeError:"
print e
else:
print "Failed to catch TypeError " \
"when registering a non-callable constructor."
## Instruction:
Convert copy_reg test to PyUnit.
## Code After:
import copy_reg
import test_support
import unittest
class C:
pass
class CopyRegTestCase(unittest.TestCase):
def test_class(self):
self.assertRaises(TypeError, copy_reg.pickle,
C, None, None)
def test_noncallable_reduce(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), "not a callable")
def test_noncallable_constructor(self):
self.assertRaises(TypeError, copy_reg.pickle,
type(1), int, "not a callable")
test_support.run_unittest(CopyRegTestCase)
|
import copy_reg
+ import test_support
+ import unittest
+
class C:
pass
- try:
- copy_reg.pickle(C, None, None)
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch expected TypeError when registering a class type."
+ class CopyRegTestCase(unittest.TestCase):
+
+ def test_class(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ C, None, None)
+
+ def test_noncallable_reduce(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ type(1), "not a callable")
+
+ def test_noncallable_constructor(self):
+ self.assertRaises(TypeError, copy_reg.pickle,
+ type(1), int, "not a callable")
+ test_support.run_unittest(CopyRegTestCase)
- print
- try:
- copy_reg.pickle(type(1), "not a callable")
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch TypeError " \
- "when registering a non-callable reduction function."
-
-
- print
- try:
- copy_reg.pickle(type(1), int, "not a callable")
- except TypeError, e:
- print "Caught expected TypeError:"
- print e
- else:
- print "Failed to catch TypeError " \
- "when registering a non-callable constructor."
|
cc2a600a7a68e438aa1dceb43f40c7ccd61b5df9
|
apps/innovate/views.py
|
apps/innovate/views.py
|
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
'blog_entries': get_blog_feed_entries(),
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
# refresh cache if requested
force = request.META.get('HTTP_CACHE_CONTROL') == 'no-cache'
entries = get_blog_feed_entries(force_update=force)
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
'blog_entries': entries,
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
Add shift-refresh to update HP cache on demand.
|
Add shift-refresh to update HP cache on demand.
|
Python
|
bsd-3-clause
|
mozilla/betafarm,mozilla/betafarm,mozilla/betafarm,mozilla/betafarm
|
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
+ # refresh cache if requested
+ force = request.META.get('HTTP_CACHE_CONTROL') == 'no-cache'
+ entries = get_blog_feed_entries(force_update=force)
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
- 'blog_entries': get_blog_feed_entries(),
+ 'blog_entries': entries,
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
Add shift-refresh to update HP cache on demand.
|
## Code Before:
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
'blog_entries': get_blog_feed_entries(),
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
## Instruction:
Add shift-refresh to update HP cache on demand.
## Code After:
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
# refresh cache if requested
force = request.META.get('HTTP_CACHE_CONTROL') == 'no-cache'
entries = get_blog_feed_entries(force_update=force)
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
'blog_entries': entries,
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
import jingo
from innovate.utils import get_blog_feed_entries
from projects.models import Project
def splash(request):
"""Display splash page. With featured projects and news feed."""
+ # refresh cache if requested
+ force = request.META.get('HTTP_CACHE_CONTROL') == 'no-cache'
+ entries = get_blog_feed_entries(force_update=force)
projects = Project.objects.filter(featured=True, inactive=False)[:4]
return jingo.render(request, 'innovate/splash.html', {
'featured_projects': projects,
- 'blog_entries': get_blog_feed_entries(),
? -------------- --
+ 'blog_entries': entries,
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
ff6502fd8ecc4ee28cb05cb7ee8f11f75240ce47
|
mini_project.py
|
mini_project.py
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
Make the difference between PV names and values clearer.
|
Make the difference between PV names and values clearer.
|
Python
|
apache-2.0
|
razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
- # print(BPMS[BPM].pv())
+ pvs = BPMS[BPM].pv()
- print caget(BPMS[BPM].pv())
+ print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
Make the difference between PV names and values clearer.
|
## Code Before:
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
## Instruction:
Make the difference between PV names and values clearer.
## Code After:
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
- # print(BPMS[BPM].pv())
? - ^^^^^ -
+ pvs = BPMS[BPM].pv()
? ^^^^^
- print caget(BPMS[BPM].pv())
+ print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
ac3edaab39a32d4108ec04746358f833d3dee7ca
|
convert_caffe_to_chainer.py
|
convert_caffe_to_chainer.py
|
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
|
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
Add input file name and output file name setting function
|
Add input file name and output file name setting function
|
Python
|
mit
|
karaage0703/deeplearning-learning
|
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
- import_model = "bvlc_googlenet.caffemodel"
+ # import_model = "bvlc_googlenet.caffemodel"
+ #
+ # print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
+ #
+ # model = caffe.CaffeFunction(import_model)
+ # print('Loaded', file=sys.stderr)
+ #
+ #
+ # pickle.dump(model, open('chainer.pkl', 'wb'), -1)
+ # print('Convert is done')
- print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
+ if __name__ == '__main__':
+ param = sys.argv
+ if (len(param) != 3):
+ print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
+ quit()
- model = caffe.CaffeFunction(import_model)
+ print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
+ model = caffe.CaffeFunction(param[1])
- print('Loaded', file=sys.stderr)
+ print('Loaded', file=sys.stderr)
+
+ print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
+ pickle.dump(model, open(param[2], 'wb'), -1)
+ print('Convert is done')
- pickle.dump(model, open('chainer.pkl', 'wb'), -1)
- print('Convert is done')
|
Add input file name and output file name setting function
|
## Code Before:
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
import_model = "bvlc_googlenet.caffemodel"
print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
model = caffe.CaffeFunction(import_model)
print('Loaded', file=sys.stderr)
pickle.dump(model, open('chainer.pkl', 'wb'), -1)
print('Convert is done')
## Instruction:
Add input file name and output file name setting function
## Code After:
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
# import_model = "bvlc_googlenet.caffemodel"
#
# print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
#
# model = caffe.CaffeFunction(import_model)
# print('Loaded', file=sys.stderr)
#
#
# pickle.dump(model, open('chainer.pkl', 'wb'), -1)
# print('Convert is done')
if __name__ == '__main__':
param = sys.argv
if (len(param) != 3):
print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
quit()
print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
model = caffe.CaffeFunction(param[1])
print('Loaded', file=sys.stderr)
print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
pickle.dump(model, open(param[2], 'wb'), -1)
print('Convert is done')
|
from __future__ import print_function
import sys
from chainer.functions import caffe
import cPickle as pickle
- import_model = "bvlc_googlenet.caffemodel"
+ # import_model = "bvlc_googlenet.caffemodel"
? ++
+ #
+ # print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
+ #
+ # model = caffe.CaffeFunction(import_model)
+ # print('Loaded', file=sys.stderr)
+ #
+ #
+ # pickle.dump(model, open('chainer.pkl', 'wb'), -1)
+ # print('Convert is done')
- print('Loading Caffe model file %s...' % import_model, file=sys.stderr)
+ if __name__ == '__main__':
+ param = sys.argv
+ if (len(param) != 3):
+ print ("Usage: $ python " + param[0] + " modelname.caffemodel chainermodel.pkl")
+ quit()
- model = caffe.CaffeFunction(import_model)
+ print('Loading Caffe model file %s...' % param[1], file=sys.stderr)
+ model = caffe.CaffeFunction(param[1])
- print('Loaded', file=sys.stderr)
+ print('Loaded', file=sys.stderr)
? ++++
+
+ print('Converting from Caffe to Chainer model file %s...' % param[2], file=sys.stderr)
+ pickle.dump(model, open(param[2], 'wb'), -1)
+ print('Convert is done')
- pickle.dump(model, open('chainer.pkl', 'wb'), -1)
- print('Convert is done')
|
d6da05f79d62f90d8d03908197a0389b67535aa5
|
halfedge_mesh.py
|
halfedge_mesh.py
|
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
def read_off(self, filename):
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
Args:
x: x-coordinate of the point
y: y-coordinate of the point
z: z-coordinate of the point
index: integer id of this vertex
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
|
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
def parse_off(self, filename):
"""Parses OFF files and returns a set of vertices, halfedges, and
facets.
"""
pass
def get_halfedge(self, u, v):
"""Retrieve halfedge with starting vertex u and target vertex v
u - starting vertex
v - target vertex
Returns a halfedge
"""
pass
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
x - x-coordinate of the point
y - y-coordinate of the point
z - z-coordinate of the point
index - integer id of this vertex
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
|
Add parse_off stub and change docstring
|
Add parse_off stub and change docstring
I follow the TomDoc format for docstrings.
|
Python
|
mit
|
carlosrojas/halfedge_mesh
|
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
- def read_off(self, filename):
+ def parse_off(self, filename):
+ """Parses OFF files and returns a set of vertices, halfedges, and
+ facets.
+ """
+ pass
+
+ def get_halfedge(self, u, v):
+ """Retrieve halfedge with starting vertex u and target vertex v
+
+ u - starting vertex
+ v - target vertex
+
+ Returns a halfedge
+ """
+ pass
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
- Args:
- x: x-coordinate of the point
+ x - x-coordinate of the point
- y: y-coordinate of the point
+ y - y-coordinate of the point
- z: z-coordinate of the point
+ z - z-coordinate of the point
- index: integer id of this vertex
+ index - integer id of this vertex
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
|
Add parse_off stub and change docstring
|
## Code Before:
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
def read_off(self, filename):
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
Args:
x: x-coordinate of the point
y: y-coordinate of the point
z: z-coordinate of the point
index: integer id of this vertex
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
## Instruction:
Add parse_off stub and change docstring
## Code After:
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
def parse_off(self, filename):
"""Parses OFF files and returns a set of vertices, halfedges, and
facets.
"""
pass
def get_halfedge(self, u, v):
"""Retrieve halfedge with starting vertex u and target vertex v
u - starting vertex
v - target vertex
Returns a halfedge
"""
pass
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
x - x-coordinate of the point
y - y-coordinate of the point
z - z-coordinate of the point
index - integer id of this vertex
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
|
class HalfedgeMesh:
def __init__(self, filename=None):
"""Make an empty halfedge mesh."""
self.vertices = []
self.halfedges = []
self.facets = []
- def read_off(self, filename):
? --
+ def parse_off(self, filename):
? ++ +
+ """Parses OFF files and returns a set of vertices, halfedges, and
+ facets.
+ """
+ pass
+
+ def get_halfedge(self, u, v):
+ """Retrieve halfedge with starting vertex u and target vertex v
+
+ u - starting vertex
+ v - target vertex
+
+ Returns a halfedge
+ """
+ pass
class Vertex:
def __init__(self, x, y, z, index):
"""Create a vertex with given index at given point.
- Args:
- x: x-coordinate of the point
- y: y-coordinate of the point
? -- ^
+ x - x-coordinate of the point
? + ^^^
- z: z-coordinate of the point
? -- ^
+ y - y-coordinate of the point
? + ^^^
+ z - z-coordinate of the point
- index: integer id of this vertex
? ---- ^
+ index - integer id of this vertex
? ^^
"""
pass
def halfedges(self):
"""Return a list of halfedges targeting to this vertex."""
pass
class Facet:
def __init__(self, index):
"""Create a facet with the given index."""
pass
def halfedges(self):
"""Return halfedges going ccw around this facet."""
pass
class Halfedge:
def __init__(self, index):
"""Create a halfedge with given index."""
pass
def opposite(self):
"""Return the opposite halfedge."""
pass
def next(self):
"""Return the opposite halfedge."""
pass
def prev(self):
"""Return the opposite halfedge."""
pass
def vertex(self):
"""Return the target vertex."""
pass
def facet(self):
"""Return the incident facet."""
pass
if __name__ == '__main__':
m = HalfedgeMesh()
|
f48063cfb9674c1e5f1f94e62ff43b239f687abd
|
examples/plot_tot_histogram.py
|
examples/plot_tot_histogram.py
|
# Author: Tamas Gal <[email protected]>
# License: BSD-3
import pandas as pd
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
hits = pd.read_hdf(filename, 'hits', mode='r')
hits.hist("tot", bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
# Author: Tamas Gal <[email protected]>
# License: BSD-3
import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
with tb.File(filename) as f:
tots = f.get_node("/hits/tot")[:]
plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
Fix for new km3hdf5 version 4
|
Fix for new km3hdf5 version 4
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
# Author: Tamas Gal <[email protected]>
# License: BSD-3
- import pandas as pd
+ import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
- hits = pd.read_hdf(filename, 'hits', mode='r')
+ with tb.File(filename) as f:
+ tots = f.get_node("/hits/tot")[:]
+
- hits.hist("tot", bins=254, log=True, edgecolor='none')
+ plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
Fix for new km3hdf5 version 4
|
## Code Before:
# Author: Tamas Gal <[email protected]>
# License: BSD-3
import pandas as pd
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
hits = pd.read_hdf(filename, 'hits', mode='r')
hits.hist("tot", bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
## Instruction:
Fix for new km3hdf5 version 4
## Code After:
# Author: Tamas Gal <[email protected]>
# License: BSD-3
import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
with tb.File(filename) as f:
tots = f.get_node("/hits/tot")[:]
plt.hist(tots, bins=254, log=True, edgecolor='none')
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
# Author: Tamas Gal <[email protected]>
# License: BSD-3
- import pandas as pd
+ import tables as tb
import matplotlib.pyplot as plt
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5"
- hits = pd.read_hdf(filename, 'hits', mode='r')
+ with tb.File(filename) as f:
+ tots = f.get_node("/hits/tot")[:]
+
- hits.hist("tot", bins=254, log=True, edgecolor='none')
? ^^ - - ^
+ plt.hist(tots, bins=254, log=True, edgecolor='none')
? ^^ ^
plt.title("ToT distribution")
plt.xlabel("ToT [ns]")
|
6182fd214580e517ffe8a59ed89037adf7fd2094
|
traits/tests/test_dynamic_trait_definition.py
|
traits/tests/test_dynamic_trait_definition.py
|
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
x_changes = List
y_changes = List
def _x_changed(self, new):
self.x_changes.append(new)
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
|
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
y_changes = List
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
|
Remove unused trait definitions in test.
|
Remove unused trait definitions in test.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
- x_changes = List
y_changes = List
-
- def _x_changed(self, new):
- self.x_changes.append(new)
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
|
Remove unused trait definitions in test.
|
## Code Before:
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
x_changes = List
y_changes = List
def _x_changed(self, new):
self.x_changes.append(new)
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
## Instruction:
Remove unused trait definitions in test.
## Code After:
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
y_changes = List
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
|
from traits.testing.unittest_tools import unittest
from traits.api import Float, HasTraits, Int, List
class Foo(HasTraits):
x = Float
- x_changes = List
y_changes = List
-
- def _x_changed(self, new):
- self.x_changes.append(new)
def _y_changed(self, new):
self.y_changes.append(new)
class TestDynamicTraitDefinition(unittest.TestCase):
""" Test demonstrating special change events using the 'event' metadata.
"""
def test_add_trait(self):
foo = Foo(x=3)
foo.add_trait('y', Int)
self.assertTrue(hasattr(foo, 'y'))
self.assertEqual(type(foo.y), int)
foo.y = 4
self.assertEqual(foo.y_changes, [4])
def test_remove_trait(self):
foo = Foo(x=3)
# We can't remove a "statically" added trait (i.e., a trait defined
# in the Foo class).
result = foo.remove_trait('x')
self.assertFalse(result)
# We can remove dynamically added traits.
foo.add_trait('y', Int)
foo.y = 70
result = foo.remove_trait('y')
self.assertTrue(result)
self.assertFalse(hasattr(foo, 'y'))
foo.y = 10
self.assertEqual(foo.y_changes, [70])
|
44514a724fc8eff464d4c26a7e9c213644c99e53
|
elasticsearch_flex/tasks.py
|
elasticsearch_flex/tasks.py
|
from celery import shared_task
@shared_task
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
|
from celery import shared_task
@shared_task(rate_limit='50/m')
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
|
Add rate-limit to update_indexed_document task
|
Add rate-limit to update_indexed_document task
|
Python
|
mit
|
prashnts/dj-elasticsearch-flex,prashnts/dj-elasticsearch-flex
|
from celery import shared_task
- @shared_task
+ @shared_task(rate_limit='50/m')
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
|
Add rate-limit to update_indexed_document task
|
## Code Before:
from celery import shared_task
@shared_task
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
## Instruction:
Add rate-limit to update_indexed_document task
## Code After:
from celery import shared_task
@shared_task(rate_limit='50/m')
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
|
from celery import shared_task
- @shared_task
+ @shared_task(rate_limit='50/m')
def update_indexed_document(index, created, pk):
indexed_doc = index.init_using_pk(pk)
indexed_doc.prepare()
indexed_doc.save()
@shared_task
def delete_indexed_document(index, pk):
indexed_doc = index.get(id=pk)
indexed_doc.delete()
__all__ = ('update_indexed_document', 'delete_indexed_document')
|
2212d1b943987652f4a6a575e3e88dc3e174ce7c
|
eigen/3.2/conanfile.py
|
eigen/3.2/conanfile.py
|
from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
Add --insecure option to hg clone to avoid self-assigned certificate issue
|
Add --insecure option to hg clone to avoid self-assigned certificate issue
|
Python
|
bsd-2-clause
|
jslee02/conan-dart,jslee02/conan-dart,jslee02/conan-dart
|
from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
- self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
+ self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
Add --insecure option to hg clone to avoid self-assigned certificate issue
|
## Code Before:
from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
## Instruction:
Add --insecure option to hg clone to avoid self-assigned certificate issue
## Code After:
from conans import ConanFile
import os
class EigenConan(ConanFile):
    """Conan recipe that packages the Eigen 3.2 headers.

    Eigen is header-only, so there is no build step: ``source`` fetches
    the sources from Bitbucket and ``package`` copies the headers.
    """
    name = "eigen"
    version = "3.2"
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=True"
    exports = "eigen/*"
    url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
    def source(self):
        """Clone the Eigen repository pinned at tag 3.2.7."""
        # --insecure skips TLS certificate verification on bitbucket.org.
        self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
        # NOTE(review): each self.run() is a separate shell, so this cd
        # does not affect later commands — presumably a leftover; confirm.
        self.run('cd eigen')
    def package(self):
        """Copy the Eigen headers (core and unsupported) into the package."""
        self.copy("*", dst="include/Eigen", src="eigen/Eigen")
        self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
- self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
+ self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
? +++++++++++
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
1cc15cbe37e1118f102f05d9530d6f0a6055d638
|
handler/base_handler.py
|
handler/base_handler.py
|
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
}
|
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
def __init__(self, *args, **kwargs):
super(BaseHandler, self).__init__(*args, **kwargs)
self.setup()
def setup(self):
pass
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
}
|
Add setup method to base serf handler
|
Add setup method to base serf handler
|
Python
|
mit
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(BaseHandler, self).__init__(*args, **kwargs)
+ self.setup()
+
+ def setup(self):
+ pass
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
}
|
Add setup method to base serf handler
|
## Code Before:
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
}
## Instruction:
Add setup method to base serf handler
## Code After:
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout


class BaseHandler(SerfHandler):
    """Base serf event handler that runs the ``setup`` hook on construction."""

    def __init__(self, *args, **kwargs):
        """Initialize the parent handler, then invoke the setup hook."""
        super(BaseHandler, self).__init__(*args, **kwargs)
        self.setup()

    def setup(self):
        """Subclass extension point; the base implementation is a no-op."""
        pass

    @truncated_stdout
    @with_payload
    def where(self, role=None):
        """Print this node's info when *role* matches our ROLE env var."""
        current_role = os.environ.get('ROLE', 'no_role')
        if role == current_role:
            print(self.my_info())

    def my_info(self):
        """Return a dict describing this node (its advertised IP, or None)."""
        return {'ip': os.environ.get('ADVERTISE', None)}
|
import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(BaseHandler, self).__init__(*args, **kwargs)
+ self.setup()
+
+ def setup(self):
+ pass
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
}
|
ea026eeeaae0ee30a5f3a4cb9f8cc2a9d1c37e6c
|
jackrabbit/utils.py
|
jackrabbit/utils.py
|
import collections
def is_callable(o):
return isinstance(o, collections.Callable)
|
import collections
import sys
if sys.platform == 'win32':
from time import clock as time
else:
from time import time
def is_callable(o):
return isinstance(o, collections.Callable)
|
Add platform dependent time import for best resolution.
|
Add platform dependent time import for best resolution.
|
Python
|
mit
|
cbigler/jackrabbit
|
import collections
+ import sys
+
+ if sys.platform == 'win32':
+ from time import clock as time
+ else:
+ from time import time
def is_callable(o):
return isinstance(o, collections.Callable)
|
Add platform dependent time import for best resolution.
|
## Code Before:
import collections
def is_callable(o):
return isinstance(o, collections.Callable)
## Instruction:
Add platform dependent time import for best resolution.
## Code After:
import collections
import sys

# Pick the highest-resolution wall-clock source for this platform.
if sys.platform == 'win32':
    try:
        # On old Windows/Python, time.clock had the best resolution.
        from time import clock as time
    except ImportError:
        # time.clock was removed in Python 3.8; perf_counter replaces it.
        from time import perf_counter as time
else:
    from time import time


def is_callable(o):
    """Return True if *o* can be called (functions, methods, classes, ...).

    Uses the ``callable`` builtin, which is equivalent to the old
    ``isinstance(o, collections.Callable)`` check but keeps working on
    Python 3.10+, where ``collections.Callable`` was removed.
    """
    return callable(o)
|
import collections
+ import sys
+
+ if sys.platform == 'win32':
+ from time import clock as time
+ else:
+ from time import time
def is_callable(o):
return isinstance(o, collections.Callable)
|
cb28bba6ee642828df473383ea469a6aa46ca59c
|
skimage/util/unique.py
|
skimage/util/unique.py
|
import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
|
import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Notes
-----
The function will generate a copy of `ar` if it is not
C-contiguous, which will negatively affect performance for large
input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
|
Add note describing array copy if discontiguous
|
Add note describing array copy if discontiguous
|
Python
|
bsd-3-clause
|
paalge/scikit-image,Midafi/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,juliusbierk/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,keflavich/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,juliusbierk/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,newville/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,Midafi/scikit-image,rjeli/scikit-image,newville/scikit-image,ClinicalGraphics/scikit-image,SamHames/scikit-image,paalge/scikit-image,michaelaye/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,oew1v07/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,keflavich/scikit-image,SamHames/scikit-image,almarklein/scikit-image,youprofit/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,blink1073/scikit-image,almarklein/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,robintw/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,pratapvardhan/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,jwiggins/scikit-image,Hiyorimi/scikit-image,bsipocz/scikit-image
|
import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
+
+ Notes
+ -----
+ The function will generate a copy of `ar` if it is not
+ C-contiguous, which will negatively affect performance for large
+ input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
|
Add note describing array copy if discontiguous
|
## Code Before:
import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
## Instruction:
Add note describing array copy if discontiguous
## Code After:
import numpy as np


def unique_rows(ar):
    """Remove repeated rows from a 2D array.

    Parameters
    ----------
    ar : 2D np.ndarray
        The input array.

    Returns
    -------
    ar_out : 2D np.ndarray
        A copy of the input array with repeated rows removed.

    Raises
    ------
    ValueError : if `ar` is not two-dimensional.

    Notes
    -----
    The function will generate a copy of `ar` if it is not
    C-contiguous, which will negatively affect performance for large
    input arrays.

    Examples
    --------
    >>> ar = np.array([[1, 0, 1],
                       [0, 1, 0],
                       [1, 0, 1]], np.uint8)
    >>> aru = unique_rows(ar)
    array([[0, 1, 0],
           [1, 0, 1]], dtype=uint8)
    """
    if ar.ndim != 2:
        raise ValueError("unique_rows() only makes sense for 2D arrays, "
                         "got %dd" % ar.ndim)
    # The byte-string view below only works on C-contiguous memory.
    contiguous = np.ascontiguousarray(ar)
    # Collapse each row into a single fixed-width byte string so that
    # np.unique() compares whole rows as single items.
    row_width = contiguous.itemsize * contiguous.shape[1]
    row_view = contiguous.view('|S%d' % row_width)
    _, first_indices = np.unique(row_view, return_index=True)
    return contiguous[first_indices]
|
import numpy as np
def unique_rows(ar):
"""Remove repeated rows from a 2D array.
Parameters
----------
ar : 2D np.ndarray
The input array.
Returns
-------
ar_out : 2D np.ndarray
A copy of the input array with repeated rows removed.
Raises
------
ValueError : if `ar` is not two-dimensional.
+
+ Notes
+ -----
+ The function will generate a copy of `ar` if it is not
+ C-contiguous, which will negatively affect performance for large
+ input arrays.
Examples
--------
>>> ar = np.array([[1, 0, 1],
[0, 1, 0],
[1, 0, 1]], np.uint8)
>>> aru = unique_rows(ar)
array([[0, 1, 0],
[1, 0, 1]], dtype=uint8)
"""
if ar.ndim != 2:
raise ValueError("unique_rows() only makes sense for 2D arrays, "
"got %dd" % ar.ndim)
# the view in the next line only works if the array is C-contiguous
ar = np.ascontiguousarray(ar)
# np.unique() finds identical items in a raveled array. To make it
# see each row as a single item, we create a view of each row as a
# byte string of length itemsize times number of columns in `ar`
ar_row_view = ar.view('|S%d' % (ar.itemsize * ar.shape[1]))
_, unique_row_indices = np.unique(ar_row_view, return_index=True)
ar_out = ar[unique_row_indices]
return ar_out
|
c1a5c5394ff9838e01b32ff448e309893c5bdf7f
|
cmsplugin_iframe/migrations/0001_initial.py
|
cmsplugin_iframe/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
Add in on_delete clause to work with more modern versions of Django
|
Add in on_delete clause to work with more modern versions of Django
|
Python
|
mit
|
satyrius/cmsplugin-iframe,satyrius/cmsplugin-iframe
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
- ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
+ ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
Add in on_delete clause to work with more modern versions of Django
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
## Instruction:
Add in on_delete clause to work with more modern versions of Django
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: create the IframePlugin CMS plugin model."""
    # Requires the django-cms base tables (CMSPlugin) to exist first.
    dependencies = [
        ('cms', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='IframePlugin',
            fields=[
                # Multi-table-inheritance link to cms.CMSPlugin; the explicit
                # on_delete is required by modern Django versions.
                ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
                # URL to embed in the iframe.
                ('src', models.URLField()),
            ],
            options={
                'abstract': False,
            },
            bases=('cms.cmsplugin',),
        ),
    ]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='IframePlugin',
fields=[
- ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
+ ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin', on_delete=models.CASCADE)),
? ++++++++++++++++++++++++++
('src', models.URLField()),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
06f10e09f5b1c5766815b6e7eb219b4e33082709
|
check_urls.py
|
check_urls.py
|
import re, sys, markdown, requests, bs4 as BeautifulSoup
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
print 'Error checking URL %s: %s' % (url, e)
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
print 'checking URLs for %s' % (filename,)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
print msg, 'OK'
else:
print msg, 'FAILED'
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
print e
ok = False
exit (0 if ok else 1)
if __name__ == '__main__':
main()
|
from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
try: # Python 2
reload
except NameError: # Python 3
from importlib import reload
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
print('Error checking URL %s: %s' % (url, e))
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
print('checking URLs for %s' % filename)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
print(msg, 'OK')
else:
print(msg, 'FAILED')
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
print(e)
ok = False
exit(0 if ok else 1)
if __name__ == '__main__':
main()
|
Add Python 3 compatibility and flake8 testing
|
Add Python 3 compatibility and flake8 testing
|
Python
|
unlicense
|
ligurio/free-software-testing-books
|
+ from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
+
+ try: # Python 2
+ reload
+ except NameError: # Python 3
+ from importlib import reload
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
- print 'Error checking URL %s: %s' % (url, e)
+ print('Error checking URL %s: %s' % (url, e))
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
- print 'checking URLs for %s' % (filename,)
+ print('checking URLs for %s' % filename)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
- print msg, 'OK'
+ print(msg, 'OK')
else:
- print msg, 'FAILED'
+ print(msg, 'FAILED')
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
- print e
+ print(e)
ok = False
- exit (0 if ok else 1)
+ exit(0 if ok else 1)
if __name__ == '__main__':
main()
|
Add Python 3 compatibility and flake8 testing
|
## Code Before:
import re, sys, markdown, requests, bs4 as BeautifulSoup
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
print 'Error checking URL %s: %s' % (url, e)
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
print 'checking URLs for %s' % (filename,)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
print msg, 'OK'
else:
print msg, 'FAILED'
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
print e
ok = False
exit (0 if ok else 1)
if __name__ == '__main__':
main()
## Instruction:
Add Python 3 compatibility and flake8 testing
## Code After:
from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
try: # Python 2
    reload
except NameError: # Python 3
    from importlib import reload
reload(sys)
# NOTE(review): sys.setdefaultencoding only exists on Python 2 after
# reload(sys); this line raises AttributeError on Python 3 — confirm
# whether Python 3 support is actually intended here.
sys.setdefaultencoding('utf8')
def check_url(url):
    """Return True if a HEAD request to *url* (following redirects) succeeds."""
    try:
        return bool(requests.head(url, allow_redirects=True))
    except Exception as e:
        print('Error checking URL %s: %s' % (url, e))
        return False
def retrieve_urls(filename):
    """Render the markdown file *filename* to HTML and return all link hrefs."""
    with open(filename) as fd:
        mdtext = fd.read()
    html_text = markdown.markdown(mdtext)
    soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
    return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
    """Check every http(s) link in *filename*; return True if all are reachable."""
    print('checking URLs for %s' % filename)
    ok = True
    for url in retrieve_urls(filename):
        # Keep only http/https links; skip anchors, mailto:, relative paths.
        r = "(?:http[s]?://[^)]+)"
        u = re.findall(r, url)
        if not u: continue
        msg = 'Checking %s => ' % (u[0],)
        if check_url(u[0]):
            print(msg, 'OK')
        else:
            print(msg, 'FAILED')
            ok = False
    return ok
def main():
    """Check all files given on the command line; exit 1 if any link failed."""
    ok = True
    for filename in sys.argv[1:]:
        try:
            ok &= check_urls(filename)
        except IOError as e:
            print(e)
            ok = False
    exit(0 if ok else 1)
if __name__ == '__main__':
    main()
|
+ from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
+
+ try: # Python 2
+ reload
+ except NameError: # Python 3
+ from importlib import reload
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
- print 'Error checking URL %s: %s' % (url, e)
? ^
+ print('Error checking URL %s: %s' % (url, e))
? ^ +
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
- print 'checking URLs for %s' % (filename,)
? ^ - -
+ print('checking URLs for %s' % filename)
? ^
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
- print msg, 'OK'
? ^
+ print(msg, 'OK')
? ^ +
else:
- print msg, 'FAILED'
? ^
+ print(msg, 'FAILED')
? ^ +
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
- print e
? ^
+ print(e)
? ^ +
ok = False
- exit (0 if ok else 1)
? -
+ exit(0 if ok else 1)
if __name__ == '__main__':
main()
|
427b894fdd5690bc7a52dbcea42c4918b87d0046
|
run_tests.py
|
run_tests.py
|
import coverage
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
cov = coverage.coverage()
cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
cov.stop()
cov.html_report()
if __name__ == '__main__':
main()
|
try:
import coverage
except ImportError:
pass
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
if "coverage" in sys.modules:
cov = coverage.coverage()
cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
if "coverage" in sys.modules:
cov.stop()
cov.html_report()
if __name__ == '__main__':
main()
|
Make coverage module optional during test run
|
Make coverage module optional during test run
Change-Id: I79f767a90a84c7b482e0cc9acd311619611802e9
|
Python
|
apache-2.0
|
brainly/check-growth
|
+ try:
- import coverage
+ import coverage
+ except ImportError:
+ pass
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
+ if "coverage" in sys.modules:
- cov = coverage.coverage()
+ cov = coverage.coverage()
+ cov.start()
- cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
- cov.stop()
+ if "coverage" in sys.modules:
+ cov.stop()
- cov.html_report()
+ cov.html_report()
if __name__ == '__main__':
main()
|
Make coverage module optional during test run
|
## Code Before:
import coverage
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
cov = coverage.coverage()
cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
cov.stop()
cov.html_report()
if __name__ == '__main__':
main()
## Instruction:
Make coverage module optional during test run
## Code After:
try:
    import coverage
except ImportError:
    # Coverage reporting is optional; tests still run without it.
    pass
import os
import shutil
import sys
import unittest


def main():
    """Run the test suite, producing an HTML coverage report when the
    optional ``coverage`` module is installed."""
    # Clean up the old HTML report, keeping the placeholder files.
    for root, dirs, files in os.walk('test/output_coverage_html/'):
        for f in files:
            if f == '.gitignore' or f == '.empty_dir':
                continue
            os.unlink(os.path.join(root, f))
        for d in dirs:
            # Fix: shutil was used here without being imported, which
            # raised NameError whenever a subdirectory existed.
            shutil.rmtree(os.path.join(root, d))
    # Perform coverage analysis only if the module imported successfully:
    if "coverage" in sys.modules:
        cov = coverage.coverage()
        cov.start()
    # Discover the tests and execute them:
    loader = unittest.TestLoader()
    tests = loader.discover('./test/')
    testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
    testRunner.run(tests)
    if "coverage" in sys.modules:
        cov.stop()
        cov.html_report()


if __name__ == '__main__':
    main()
|
+ try:
- import coverage
+ import coverage
? ++++
+ except ImportError:
+ pass
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
+ if "coverage" in sys.modules:
- cov = coverage.coverage()
+ cov = coverage.coverage()
? ++++
+ cov.start()
- cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
- cov.stop()
+ if "coverage" in sys.modules:
+ cov.stop()
- cov.html_report()
+ cov.html_report()
? ++++
if __name__ == '__main__':
main()
|
62f9608d50898d0a82e013d54454ed1edb004cff
|
fab_deploy/joyent/setup.py
|
fab_deploy/joyent/setup.py
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
    """Joyent/SmartOS specifics layered onto the base setup classes."""
    def _set_profile(self):
        # Force 64-bit gcc for compiled Python extensions on SmartOS.
        append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
        append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
    def _ssh_restart(self):
        # SmartOS manages sshd via SMF, not init scripts.
        run('svcadm restart ssh')
class AppMixin(JoyentMixin):
    """App-server additions: pkgsrc packages plus pip in a virtualenv."""
    # pkgsrc packages required to run the Python 2.7 application stack.
    packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
                'py27-imaging', 'py27-expat']
    def _set_profile(self):
        # Apply both the Joyent environment vars and the base app profile.
        JoyentMixin._set_profile(self)
        base_setup.AppSetup._set_profile(self)
    def _install_packages(self):
        for package in self.packages:
            sudo('pkg_add %s' % package)
        sudo('easy_install-2.7 pip')
        self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
    pass
class AppSetup(AppMixin, base_setup.AppSetup):
    pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
    pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
    pass
class DevSetup(AppMixin, base_setup.DevSetup):
    pass
# Module-level task instances used directly from fabfiles.
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
Add environ vars for joyent
|
Add environ vars for joyent
|
Python
|
mit
|
ff0000/red-fab-deploy2,ff0000/red-fab-deploy2,ff0000/red-fab-deploy2
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
+
+ def _set_profile(self):
+ JoyentMixin._set_profile(self)
+ base_setup.AppSetup._set_profile(self)
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
Add environ vars for joyent
|
## Code Before:
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
## Instruction:
Add environ vars for joyent
## Code After:
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
def _set_profile(self):
JoyentMixin._set_profile(self)
base_setup.AppSetup._set_profile(self)
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
+
+ def _set_profile(self):
+ JoyentMixin._set_profile(self)
+ base_setup.AppSetup._set_profile(self)
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
ce5b3402d9dc5bf69b96c45a810a987d6d4b4231
|
tests/functional_tests/test_valid_recipes.py
|
tests/functional_tests/test_valid_recipes.py
|
import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
|
import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
|
Change exception from RecipeError to SystemExit
|
Change exception from RecipeError to SystemExit
|
Python
|
bsd-3-clause
|
mandeep/conda-verify
|
import os
import pytest
from conda_verify import utilities
- from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
- except RecipeError as error:
+ except SystemExit as error:
pytest.fail(error)
|
Change exception from RecipeError to SystemExit
|
## Code Before:
import os
import pytest
from conda_verify import utilities
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except RecipeError as error:
pytest.fail(error)
## Instruction:
Change exception from RecipeError to SystemExit
## Code After:
import os
import pytest
from conda_verify import utilities
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
except SystemExit as error:
pytest.fail(error)
|
import os
import pytest
from conda_verify import utilities
- from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
@pytest.fixture
def recipe_dir():
return os.path.join(os.path.dirname(__file__), 'test_recipes')
@pytest.fixture
def verifier():
recipe_verifier = Verify()
return recipe_verifier
def test_valid_test_file(recipe_dir, verifier):
recipe = os.path.join(recipe_dir, 'valid_test_file')
metadata = utilities.render_metadata(recipe, None)
try:
verifier.verify_recipe(rendered_meta=metadata, recipe_dir=recipe)
- except RecipeError as error:
+ except SystemExit as error:
pytest.fail(error)
|
8268b050ae98180a55b9c2e5285de1af5b8ca3e5
|
pages/tests.py
|
pages/tests.py
|
from django.test import TestCase
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
Test that the add admin page could be displayed correctly
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
|
from django.test import TestCase
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
Test that the add admin page could be displayed via the admin
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
def test_02_create_page(self):
"""
Test that a page can be created via the admin
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = {'title':'test page', 'slug':'test-page', 'language':'en', 'sites':[1], 'status':1}
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
|
Add a create page test
|
Add a create page test
|
Python
|
bsd-3-clause
|
PiRSquared17/django-page-cms,google-code-export/django-page-cms,google-code-export/django-page-cms,odyaka341/django-page-cms,PiRSquared17/django-page-cms,odyaka341/django-page-cms,odyaka341/django-page-cms,google-code-export/django-page-cms,Alwnikrotikz/django-page-cms,odyaka341/django-page-cms,PiRSquared17/django-page-cms,Alwnikrotikz/django-page-cms,pombreda/django-page-cms,Alwnikrotikz/django-page-cms,PiRSquared17/django-page-cms,google-code-export/django-page-cms,pombreda/django-page-cms,Alwnikrotikz/django-page-cms,pombreda/django-page-cms,pombreda/django-page-cms
|
from django.test import TestCase
+
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
- Test that the add admin page could be displayed correctly
+ Test that the add admin page could be displayed via the admin
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
-
+
+ def test_02_create_page(self):
+ """
+ Test that a page can be created via the admin
+ """
+ c = Client()
+ c.login(username= 'batiste', password='b')
+ page_data = {'title':'test page', 'slug':'test-page', 'language':'en', 'sites':[1], 'status':1}
+ response = c.post('/admin/pages/page/add/', page_data)
+ self.assertRedirects(response, '/admin/pages/page/')
+
|
Add a create page test
|
## Code Before:
from django.test import TestCase
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
Test that the add admin page could be displayed correctly
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
## Instruction:
Add a create page test
## Code After:
from django.test import TestCase
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
Test that the add admin page could be displayed via the admin
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
def test_02_create_page(self):
"""
Test that a page can be created via the admin
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = {'title':'test page', 'slug':'test-page', 'language':'en', 'sites':[1], 'status':1}
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
|
from django.test import TestCase
+
from pages.models import *
from django.test.client import Client
class PagesTestCase(TestCase):
fixtures = ['tests.json']
def test_01_add_page(self):
"""
- Test that the add admin page could be displayed correctly
? ^^^^ ^^^^
+ Test that the add admin page could be displayed via the admin
? ^^^^^^ ^^^^^^
"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
assert(response.status_code == 200)
+
+
+ def test_02_create_page(self):
-
+ """
? +++
+ Test that a page can be created via the admin
+ """
+ c = Client()
+ c.login(username= 'batiste', password='b')
+ page_data = {'title':'test page', 'slug':'test-page', 'language':'en', 'sites':[1], 'status':1}
+ response = c.post('/admin/pages/page/add/', page_data)
+ self.assertRedirects(response, '/admin/pages/page/')
|
39b63523634801fe8ef2cca03e11b3875d84cdbd
|
flare/flare_io.py
|
flare/flare_io.py
|
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
def md_trajectory_to_file(filename, structures: List[Structure]):
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
f = open(filename, 'w')
dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
f.close()
def md_trajectory_from_file(filename):
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
f = open(filename, 'r')
structure_list = load(f)
structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
return structures
|
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
def md_trajectory_to_file(filename: str, structures: List[Structure]):
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
with open(filename, 'w') as f:
dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
def md_trajectory_from_file(filename: str):
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
with open(filename, 'r') as f:
structure_list = load(f)
structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
return structures
|
Tweak syntax for f.close() concision, add typehints
|
Tweak syntax for f.close() concision, add typehints
|
Python
|
mit
|
mir-group/flare,mir-group/flare
|
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
- def md_trajectory_to_file(filename, structures: List[Structure]):
+ def md_trajectory_to_file(filename: str, structures: List[Structure]):
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
- f = open(filename, 'w')
+ with open(filename, 'w') as f:
- dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
+ dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
- f.close()
- def md_trajectory_from_file(filename):
+ def md_trajectory_from_file(filename: str):
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
- f = open(filename, 'r')
+ with open(filename, 'r') as f:
- structure_list = load(f)
+ structure_list = load(f)
- structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
+ structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
return structures
|
Tweak syntax for f.close() concision, add typehints
|
## Code Before:
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
def md_trajectory_to_file(filename, structures: List[Structure]):
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
f = open(filename, 'w')
dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
f.close()
def md_trajectory_from_file(filename):
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
f = open(filename, 'r')
structure_list = load(f)
structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
return structures
## Instruction:
Tweak syntax for f.close() concision, add typehints
## Code After:
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
def md_trajectory_to_file(filename: str, structures: List[Structure]):
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
with open(filename, 'w') as f:
dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
def md_trajectory_from_file(filename: str):
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
with open(filename, 'r') as f:
structure_list = load(f)
structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
return structures
|
from flare.struc import Structure
from typing import List
from json import dump, load
from flare.util import NumpyEncoder
- def md_trajectory_to_file(filename, structures: List[Structure]):
+ def md_trajectory_to_file(filename: str, structures: List[Structure]):
? +++++
"""
Take a list of structures and write them to a json file.
:param filename:
:param structures:
"""
- f = open(filename, 'w')
? ^^^
+ with open(filename, 'w') as f:
? ^^^^ ++++++
- dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
+ dump([s.as_dict() for s in structures], f, cls=NumpyEncoder)
? +
- f.close()
- def md_trajectory_from_file(filename):
+ def md_trajectory_from_file(filename: str):
? +++++
"""
Read a list of structures from a json file, formatted as in md_trajectory_to_file.
:param filename:
"""
- f = open(filename, 'r')
? ^^^
+ with open(filename, 'r') as f:
? ^^^^ ++++++
- structure_list = load(f)
+ structure_list = load(f)
? +
- structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
+ structures = [Structure.from_dict(dictionary) for dictionary in structure_list]
? +
return structures
|
d0f18bed554c58873776eefba5b2be1d60926f95
|
elevator_cli/io.py
|
elevator_cli/io.py
|
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
import shlex
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = shlex.split(input_str.strip())
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
Update : protect cli quoted arguments while parsing
|
Update : protect cli quoted arguments while parsing
|
Python
|
mit
|
oleiade/Elevator
|
+
+ import shlex
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
- input_str = input_str.strip().split()
+ input_str = shlex.split(input_str.strip())
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
Update : protect cli quoted arguments while parsing
|
## Code Before:
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
## Instruction:
Update : protect cli quoted arguments while parsing
## Code After:
import shlex
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = shlex.split(input_str.strip())
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
+
+ import shlex
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
- input_str = input_str.strip().split()
? -------
+ input_str = shlex.split(input_str.strip())
? ++++++++++++
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
dc512b896ca7311c0c04dc11b5283dc0ffb4f1e1
|
seating_charts/management/commands/sync_students.py
|
seating_charts/management/commands/sync_students.py
|
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
|
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
# Remove extra students
extra_students = SeatingStudent.objects.exclude(enrollment__student__in=[e.student for e in current_enrollments]).all()
extra_students.delete()
|
Remove extra seating students during sync
|
Remove extra seating students during sync
|
Python
|
mit
|
rectory-school/rectory-apps,rectory-school/rectory-apps,rectory-school/rectory-apps,rectory-school/rectory-apps,rectory-school/rectory-apps
|
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
-
+ # Remove extra students
+ extra_students = SeatingStudent.objects.exclude(enrollment__student__in=[e.student for e in current_enrollments]).all()
+ extra_students.delete()
|
Remove extra seating students during sync
|
## Code Before:
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
## Instruction:
Remove extra seating students during sync
## Code After:
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
# Remove extra students
extra_students = SeatingStudent.objects.exclude(enrollment__student__in=[e.student for e in current_enrollments]).all()
extra_students.delete()
|
import logging
from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from validate_email import validate_email
from academics.models import Enrollment, AcademicYear
from seating_charts.models import SeatingStudent
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync academic students with seating students"
def handle(self, *args, **kwargs):
academic_year = AcademicYear.objects.current()
current_enrollments = Enrollment.objects.filter(student__current=True, academic_year=academic_year)
for enrollment in current_enrollments:
#Get the seating student based on the student, not the enrollment
try:
seating_student = SeatingStudent.objects.get(enrollment__student=enrollment.student)
#We found a seating student, but the enrollment was incorrect
if seating_student.enrollment != enrollment:
seating_student.enrollment = enrollment
seating_student.save()
except SeatingStudent.DoesNotExist:
#We did not find a seating student
seating_student = SeatingStudent()
seating_student.enrollment = enrollment
seating_student.save()
+ # Remove extra students
+ extra_students = SeatingStudent.objects.exclude(enrollment__student__in=[e.student for e in current_enrollments]).all()
+ extra_students.delete()
|
e90cc22226189b8950957cbf8637e49ee7798c4b
|
django_token/middleware.py
|
django_token/middleware.py
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').split()
if not auth_header or auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if len(auth_header) != 2:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[1])
if user:
request.user = user
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').partition(' ')
if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
Use partition instead of split.
|
Use partition instead of split.
|
Python
|
mit
|
jasonbeverage/django-token
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
- auth_header = request.META.get('HTTP_AUTHORIZATION', b'').split()
+ auth_header = request.META.get('HTTP_AUTHORIZATION', b'').partition(' ')
- if not auth_header or auth_header[0].lower() != b'token':
+ if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
- if len(auth_header) != 2:
+ if not auth_header[2]:
return HttpResponseBadRequest("Improperly formatted token")
- user = auth.authenticate(token=auth_header[1])
+ user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
Use partition instead of split.
|
## Code Before:
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').split()
if not auth_header or auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if len(auth_header) != 2:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[1])
if user:
request.user = user
## Instruction:
Use partition instead of split.
## Code After:
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').partition(' ')
if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
- auth_header = request.META.get('HTTP_AUTHORIZATION', b'').split()
? - ^
+ auth_header = request.META.get('HTTP_AUTHORIZATION', b'').partition(' ')
? ^^^ +++ +++
- if not auth_header or auth_header[0].lower() != b'token':
? -------------------
+ if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
- if len(auth_header) != 2:
? -- ^ ^^^^^
+ if not auth_header[2]:
? ^^^ ^ +
return HttpResponseBadRequest("Improperly formatted token")
- user = auth.authenticate(token=auth_header[1])
? ^
+ user = auth.authenticate(token=auth_header[2])
? ^
if user:
request.user = user
|
0324d220872ef063cb39ce62264bd4835f260920
|
test_project/urls.py
|
test_project/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', auth_views.login, name='login'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
Replace str into call in url
|
Replace str into call in url
|
Python
|
bsd-3-clause
|
makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
+ from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
- url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
+ url(r'^login/$', auth_views.login, name='login'),
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
+ url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
Replace str into call in url
|
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
## Instruction:
Replace str into call in url
## Code After:
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', auth_views.login, name='login'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
+ from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
- url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
? ---------------- ^ -
+ url(r'^login/$', auth_views.login, name='login'),
? ^
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
? ---------------- ^ -
+ url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
? ^
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
ea287349360f6102369df3bdc8efb64a684a95ca
|
program/templatetags/timeslots.py
|
program/templatetags/timeslots.py
|
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
return '30'
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
|
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
if end.minute == 5:
return '30'
return '%d' % (((end - start).seconds / 60) + 25)
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
|
Add exception to to return extended height of the show
|
Add exception to to return extended height of the show
|
Python
|
unknown
|
radio-helsinki-graz/pv,radio-helsinki-graz/pv,nnrcschmdt/helsinki,radio-helsinki-graz/pv,nnrcschmdt/helsinki,nnrcschmdt/helsinki
|
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
+ if end.minute == 5:
- return '30'
+ return '30'
+ return '%d' % (((end - start).seconds / 60) + 25)
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
|
Add exception to to return extended height of the show
|
## Code Before:
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
return '30'
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
## Instruction:
Add exception to to return extended height of the show
## Code After:
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
if end.minute == 5:
return '30'
return '%d' % (((end - start).seconds / 60) + 25)
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
|
from django import template
from datetime import datetime, time, timedelta
register = template.Library()
@register.simple_tag
def height(start, end):
if start.year == 2020 and int(start.strftime('%V')) >= 5 and start.hour == 12 and start.minute == 0:
+ if end.minute == 5:
- return '30'
+ return '30'
? ++++
+ return '%d' % (((end - start).seconds / 60) + 25)
else:
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_until(end):
start = datetime.combine(end.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
@register.simple_tag
def height_since(start):
if start.time() < time(23, 59):
end = datetime.combine(start.date() + timedelta(days=1), time(6, 0))
else:
end = datetime.combine(start.date(), time(6, 0))
return '%d' % ((end - start).seconds / 60)
|
0f9d3b0ed9efc72b8b3fd4d466caa4517691546c
|
strategies/alexStrategies.py
|
strategies/alexStrategies.py
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
Add a simple card counting strategy
|
Add a simple card counting strategy
|
Python
|
mit
|
AlexMooney/pairsTournament
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
+ class CardCounter:
+ """This strategy folds based on card counting expectation values."""
+ def __init__(self, scared=0.23):
+ from collections import Counter
+ self.Counter = Counter
+ self.scared = scared
+ def play(self, info):
+ c = self.Counter(info.deck)
+ if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
+ return 'Hit me'
+ else:
+ return 'fold'
+
|
Add a simple card counting strategy
|
## Code Before:
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
## Instruction:
Add a simple card counting strategy
## Code After:
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
+
+ class CardCounter:
+ """This strategy folds based on card counting expectation values."""
+ def __init__(self, scared=0.23):
+ from collections import Counter
+ self.Counter = Counter
+ self.scared = scared
+ def play(self, info):
+ c = self.Counter(info.deck)
+ if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
+ return 'Hit me'
+ else:
+ return 'fold'
|
81e3425bc6b2b9b35071afd7c14322f0dd52b418
|
oneanddone/tests/functional/tests/test_task_assignment.py
|
oneanddone/tests/functional/tests/test_task_assignment.py
|
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
Add positive test for search
|
Add positive test for search
|
Python
|
mpl-2.0
|
VarnaSuresh/oneanddone,VarnaSuresh/oneanddone,VarnaSuresh/oneanddone,VarnaSuresh/oneanddone
|
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
- def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
+ def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
+
+ # Check if assignable task is found
+ home_page.search_for_task(task.name)
+ assert len(available_tasks_page.available_tasks)
+
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
Add positive test for search
|
## Code Before:
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
## Instruction:
Add positive test for search
## Code After:
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
- def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
+ def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
? ++++++
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
+
+ # Check if assignable task is found
+ home_page.search_for_task(task.name)
+ assert len(available_tasks_page.available_tasks)
+
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
6b26102efdee4ae365ddd0bce126d6045865a9bc
|
stock.py
|
stock.py
|
import bisect
import collections
PriceEvent = collections.namedtuple("PriceEvent", ["timestamp", "price"])
class Stock:
def __init__(self, symbol):
"""Constructor for Stock instance.
Args:
symbol: The stock symbol.
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
"""
if price < 0:
raise ValueError("price should not be negative")
bisect.insort_left(self.price_history, PriceEvent(timestamp, price))
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
|
import bisect
import collections
stock_price_event = collections.namedtuple("stock_price_event", ["timestamp", "price"])
class Stock:
def __init__(self, symbol):
"""A Stock object representing its price history.
Args:
symbol (str): The stock symbol.
Attributes:
symbol (str): The stock symbol.
price (float): The most recent price.
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
"""
if price < 0:
raise ValueError("price should not be negative")
bisect.insort_left(self.price_history, stock_price_event(timestamp, price))
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
|
Update comments and variable names.
|
Update comments and variable names.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
import bisect
import collections
- PriceEvent = collections.namedtuple("PriceEvent", ["timestamp", "price"])
+ stock_price_event = collections.namedtuple("stock_price_event", ["timestamp", "price"])
class Stock:
def __init__(self, symbol):
- """Constructor for Stock instance.
+ """A Stock object representing its price history.
Args:
- symbol: The stock symbol.
+ symbol (str): The stock symbol.
+
+ Attributes:
+ symbol (str): The stock symbol.
+ price (float): The most recent price.
+
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
+
"""
if price < 0:
raise ValueError("price should not be negative")
- bisect.insort_left(self.price_history, PriceEvent(timestamp, price))
+ bisect.insort_left(self.price_history, stock_price_event(timestamp, price))
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
|
Update comments and variable names.
|
## Code Before:
import bisect
import collections
PriceEvent = collections.namedtuple("PriceEvent", ["timestamp", "price"])
class Stock:
def __init__(self, symbol):
"""Constructor for Stock instance.
Args:
symbol: The stock symbol.
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
"""
if price < 0:
raise ValueError("price should not be negative")
bisect.insort_left(self.price_history, PriceEvent(timestamp, price))
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
## Instruction:
Update comments and variable names.
## Code After:
import bisect
import collections
stock_price_event = collections.namedtuple("stock_price_event", ["timestamp", "price"])
class Stock:
def __init__(self, symbol):
"""A Stock object representing its price history.
Args:
symbol (str): The stock symbol.
Attributes:
symbol (str): The stock symbol.
price (float): The most recent price.
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
"""
if price < 0:
raise ValueError("price should not be negative")
bisect.insort_left(self.price_history, stock_price_event(timestamp, price))
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
|
import bisect
import collections
- PriceEvent = collections.namedtuple("PriceEvent", ["timestamp", "price"])
? ^ ^ ^ ^
+ stock_price_event = collections.namedtuple("stock_price_event", ["timestamp", "price"])
? ^^^^^^^ ^^ ^^^^^^^ ^^
class Stock:
def __init__(self, symbol):
- """Constructor for Stock instance.
+ """A Stock object representing its price history.
Args:
- symbol: The stock symbol.
+ symbol (str): The stock symbol.
? ++++++
+
+ Attributes:
+ symbol (str): The stock symbol.
+ price (float): The most recent price.
+
"""
self.symbol = symbol
self.price_history = []
@property
def price(self):
"""Returns the stocks most recent price.
Returns: Most recent price.
"""
return self.price_history[-1].price if self.price_history else None
def update(self, timestamp, price):
"""Updates the stock's price history.
Args:
timestamp: The timestamp of the update.
price: The new price of the stock.
+
"""
if price < 0:
raise ValueError("price should not be negative")
- bisect.insort_left(self.price_history, PriceEvent(timestamp, price))
? ^ ^
+ bisect.insort_left(self.price_history, stock_price_event(timestamp, price))
? ^^^^^^^ ^^
def is_increasing_trend(self):
"""Determines if last three prices were ascending in value.
Returns: True if there is an increasing trend, False if not.
"""
return self.price_history[-3].price < self.price_history[-2].price < self.price_history[-1].price
|
41e29433da8f7db803ddd76ac7c7d543c69ad41c
|
account_journal_period_close/tests/__init__.py
|
account_journal_period_close/tests/__init__.py
|
from . import test_account_journal_period_close
|
from . import test_account_journal_period_close
checks = [
test_account_journal_period_close,
]
|
Add checks list on tests init file
|
[ADD] Add checks list on tests init file
|
Python
|
agpl-3.0
|
open-synergy/account-financial-tools,factorlibre/account-financial-tools,damdam-s/account-financial-tools,VitalPet/account-financial-tools,credativUK/account-financial-tools,open-synergy/account-financial-tools,abstract-open-solutions/account-financial-tools,taktik/account-financial-tools,nagyv/account-financial-tools,acsone/account-financial-tools,adhoc-dev/oca-account-financial-tools,andrius-preimantas/account-financial-tools,Domatix/account-financial-tools,ClearCorp-dev/account-financial-tools,bringsvor/account-financial-tools,xpansa/account-financial-tools,Antiun/account-financial-tools,acsone/account-financial-tools,Endika/account-financial-tools,xpansa/account-financial-tools,OpenPymeMx/account-financial-tools,syci/account-financial-tools,vauxoo-dev/account-financial-tools,akretion/account-financial-tools,luc-demeyer/account-financial-tools,syci/account-financial-tools,damdam-s/account-financial-tools,alhashash/account-financial-tools,raycarnes/account-financial-tools,dvitme/account-financial-tools,rschnapka/account-financial-tools,adhoc-dev/oca-account-financial-tools,open-synergy/account-financial-tools,VitalPet/account-financial-tools,lepistone/account-financial-tools,yelizariev/account-financial-tools,andhit-r/account-financial-tools,amoya-dx/account-financial-tools,pedrobaeza/account-financial-tools,charbeljc/account-financial-tools,OpenPymeMx/account-financial-tools,factorlibre/account-financial-tools,credativUK/account-financial-tools,Antiun/account-financial-tools,nagyv/account-financial-tools,bringsvor/account-financial-tools,cysnake4713/account-financial-tools,pedrobaeza/account-financial-tools,dvitme/account-financial-tools,iDTLabssl/account-financial-tools,vauxoo-dev/account-financial-tools,iDTLabssl/account-financial-tools,Endika/account-financial-tools,amoya-dx/account-financial-tools,VitalPet/account-financial-tools,cysnake4713/account-financial-tools,yelizariev/account-financial-tools,Domatix/account-financial-tools,abstract-open-solutions/acco
unt-financial-tools,diagramsoftware/account-financial-tools,ClearCorp-dev/account-financial-tools,raycarnes/account-financial-tools,rschnapka/account-financial-tools,luc-demeyer/account-financial-tools,lepistone/account-financial-tools,DarkoNikolovski/account-financial-tools,Pexego/account-financial-tools,alhashash/account-financial-tools,andhit-r/account-financial-tools,Nowheresly/account-financial-tools,Domatix/account-financial-tools,acsone/account-financial-tools,OpenPymeMx/account-financial-tools,Pexego/account-financial-tools,akretion/account-financial-tools,DarkoNikolovski/account-financial-tools,diagramsoftware/account-financial-tools,taktik/account-financial-tools,Nowheresly/account-financial-tools,charbeljc/account-financial-tools,andrius-preimantas/account-financial-tools
|
from . import test_account_journal_period_close
+
+ checks = [
+ test_account_journal_period_close,
+ ]
+
|
Add checks list on tests init file
|
## Code Before:
from . import test_account_journal_period_close
## Instruction:
Add checks list on tests init file
## Code After:
from . import test_account_journal_period_close
checks = [
test_account_journal_period_close,
]
|
from . import test_account_journal_period_close
+
+
+ checks = [
+ test_account_journal_period_close,
+ ]
|
ff460f9a3c7df3322271eeb5de3bead72fe121bc
|
bmi_tester/tests_pytest/test_grid_uniform_rectilinear.py
|
bmi_tester/tests_pytest/test_grid_uniform_rectilinear.py
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
Remove test for get_grid_shape for scalar grids.
|
Remove test for get_grid_shape for scalar grids.
|
Python
|
mit
|
csdms/bmi-tester
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
- if gtype == 'scalar':
- ndim = new_bmi.get_grid_rank(gid)
-
- shape = np.empty(ndim, dtype=np.int32)
- try:
- rtn = new_bmi.get_grid_shape(gid, shape)
- except TypeError:
- warnings.warn('get_grid_shape should take two arguments')
- rtn = new_bmi.get_grid_shape(gid)
- else:
- assert rtn is shape
- np.testing.assert_equal(shape, ())
-
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
Remove test for get_grid_shape for scalar grids.
|
## Code Before:
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
## Instruction:
Remove test for get_grid_shape for scalar grids.
## Code After:
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
- if gtype == 'scalar':
- ndim = new_bmi.get_grid_rank(gid)
-
- shape = np.empty(ndim, dtype=np.int32)
- try:
- rtn = new_bmi.get_grid_shape(gid, shape)
- except TypeError:
- warnings.warn('get_grid_shape should take two arguments')
- rtn = new_bmi.get_grid_shape(gid)
- else:
- assert rtn is shape
- np.testing.assert_equal(shape, ())
-
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
207a1a8fad79ccfa0c244aa0a1d0d25fee87c438
|
testfixtures/tests/test_docs.py
|
testfixtures/tests/test_docs.py
|
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
path = os.environ.get('DOCPATH', join(dirname(__file__),pardir,pardir,'docs'))
tests = glob(join(path,'*.txt'))
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
|
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))
tests = glob(join(workspace,'docs', '*.txt'))
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
|
Use a WORKSPACE to make life easier in Jenkins.
|
Use a WORKSPACE to make life easier in Jenkins.
|
Python
|
mit
|
nebulans/testfixtures,Simplistix/testfixtures
|
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
- path = os.environ.get('DOCPATH', join(dirname(__file__),pardir,pardir,'docs'))
+ workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))
- tests = glob(join(path,'*.txt'))
+ tests = glob(join(workspace,'docs', '*.txt'))
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
|
Use a WORKSPACE to make life easier in Jenkins.
|
## Code Before:
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
path = os.environ.get('DOCPATH', join(dirname(__file__),pardir,pardir,'docs'))
tests = glob(join(path,'*.txt'))
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
## Instruction:
Use a WORKSPACE to make life easier in Jenkins.
## Code After:
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))
tests = glob(join(workspace,'docs', '*.txt'))
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
|
from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os
from . import compat
- path = os.environ.get('DOCPATH', join(dirname(__file__),pardir,pardir,'docs'))
? ^^ ^ ^ ^^ -------
+ workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))
? +++++ ^^ ^ ^^^ ^^ + +
- tests = glob(join(path,'*.txt'))
? ^^
+ tests = glob(join(workspace,'docs', '*.txt'))
? +++++ ^^ ++++++++
if not tests:
raise SkipTest('No docs found to test') # pragma: no cover
def test_suite():
m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
m += compat.Manuel()
m += capture.Manuel()
return TestSuite(m, *tests)
|
268753ad6e4c3345e821c541e1851ee7f7a2b649
|
eachday/tests/test_resource_utils.py
|
eachday/tests/test_resource_utils.py
|
from eachday.tests.base import BaseTestCase
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
|
from eachday.resources import LoginResource
from eachday.tests.base import BaseTestCase
from unittest.mock import patch
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
class TestExceptionHandler(BaseTestCase):
@patch('eachday.resources.db')
@patch('eachday.resources.log')
def test_exception_handling(self, LogMock, DbMock):
''' Test that internal server errors are handled gracefully '''
view_exception = Exception('Uh oh')
# Inject function into /login to raise an exception
def raise_error(self):
raise view_exception
LoginResource.post = raise_error
resp = self.client.post('/login')
self.assertEqual(resp.status_code, 500)
data = json.loads(resp.data.decode())
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Something went wrong.')
# Make sure session gets rolled back
self.assertTrue(DbMock.session.rollback.called)
# Make sure exception is logged correctly
LogMock.error.assert_called_with(view_exception)
LogMock.info.assert_called_with('Rolled back current session')
|
Add test for exception handling in flask app
|
Add test for exception handling in flask app
|
Python
|
mit
|
bcongdon/EachDay,bcongdon/EachDay,bcongdon/EachDay,bcongdon/EachDay
|
+ from eachday.resources import LoginResource
from eachday.tests.base import BaseTestCase
+ from unittest.mock import patch
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
+
+ class TestExceptionHandler(BaseTestCase):
+
+ @patch('eachday.resources.db')
+ @patch('eachday.resources.log')
+ def test_exception_handling(self, LogMock, DbMock):
+ ''' Test that internal server errors are handled gracefully '''
+ view_exception = Exception('Uh oh')
+
+ # Inject function into /login to raise an exception
+ def raise_error(self):
+ raise view_exception
+ LoginResource.post = raise_error
+
+ resp = self.client.post('/login')
+ self.assertEqual(resp.status_code, 500)
+ data = json.loads(resp.data.decode())
+ self.assertEqual(data['status'], 'error')
+ self.assertEqual(data['error'], 'Something went wrong.')
+
+ # Make sure session gets rolled back
+ self.assertTrue(DbMock.session.rollback.called)
+
+ # Make sure exception is logged correctly
+ LogMock.error.assert_called_with(view_exception)
+ LogMock.info.assert_called_with('Rolled back current session')
+
|
Add test for exception handling in flask app
|
## Code Before:
from eachday.tests.base import BaseTestCase
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
## Instruction:
Add test for exception handling in flask app
## Code After:
from eachday.resources import LoginResource
from eachday.tests.base import BaseTestCase
from unittest.mock import patch
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
class TestExceptionHandler(BaseTestCase):
@patch('eachday.resources.db')
@patch('eachday.resources.log')
def test_exception_handling(self, LogMock, DbMock):
''' Test that internal server errors are handled gracefully '''
view_exception = Exception('Uh oh')
# Inject function into /login to raise an exception
def raise_error(self):
raise view_exception
LoginResource.post = raise_error
resp = self.client.post('/login')
self.assertEqual(resp.status_code, 500)
data = json.loads(resp.data.decode())
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Something went wrong.')
# Make sure session gets rolled back
self.assertTrue(DbMock.session.rollback.called)
# Make sure exception is logged correctly
LogMock.error.assert_called_with(view_exception)
LogMock.info.assert_called_with('Rolled back current session')
|
+ from eachday.resources import LoginResource
from eachday.tests.base import BaseTestCase
+ from unittest.mock import patch
import json
class TestResourceUtils(BaseTestCase):
def test_invalid_json_error(self):
''' Test that an invalid JSON body has a decent error message '''
resp = self.client.post(
'/register',
data='{"invalid": json}',
content_type='application/json'
)
data = json.loads(resp.data.decode())
self.assertEqual(resp.status_code, 400)
self.assertEqual(data['status'], 'error')
self.assertEqual(data['error'], 'Invalid JSON body')
+
+
+ class TestExceptionHandler(BaseTestCase):
+
+ @patch('eachday.resources.db')
+ @patch('eachday.resources.log')
+ def test_exception_handling(self, LogMock, DbMock):
+ ''' Test that internal server errors are handled gracefully '''
+ view_exception = Exception('Uh oh')
+
+ # Inject function into /login to raise an exception
+ def raise_error(self):
+ raise view_exception
+ LoginResource.post = raise_error
+
+ resp = self.client.post('/login')
+ self.assertEqual(resp.status_code, 500)
+ data = json.loads(resp.data.decode())
+ self.assertEqual(data['status'], 'error')
+ self.assertEqual(data['error'], 'Something went wrong.')
+
+ # Make sure session gets rolled back
+ self.assertTrue(DbMock.session.rollback.called)
+
+ # Make sure exception is logged correctly
+ LogMock.error.assert_called_with(view_exception)
+ LogMock.info.assert_called_with('Rolled back current session')
|
8a43cf58791a665a4fc23bc5d0911af61f7e1fb6
|
qipr_approver/approver/views/similar_projects.py
|
qipr_approver/approver/views/similar_projects.py
|
from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
|
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
|
Add shib auth to similar projects page
|
Add shib auth to similar projects page
|
Python
|
apache-2.0
|
DevMattM/qipr_approver,DevMattM/qipr_approver,ctsit/qipr_approver,ctsit/qipr_approver,ctsit/qipr_approver,DevMattM/qipr_approver,DevMattM/qipr_approver,PFWhite/qipr_approver,DevMattM/qipr_approver,PFWhite/qipr_approver,ctsit/qipr_approver,PFWhite/qipr_approver,ctsit/qipr_approver,PFWhite/qipr_approver,PFWhite/qipr_approver
|
from django.shortcuts import redirect
+ from django.core.urlresolvers import reverse
+ from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
- from approver.decorators import login_required
+
import approver.utils as utils
- from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
-
+
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
- }
+ }
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
+
|
Add shib auth to similar projects page
|
## Code Before:
from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
## Instruction:
Add shib auth to similar projects page
## Code After:
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
|
from django.shortcuts import redirect
+ from django.core.urlresolvers import reverse
+ from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
- from approver.decorators import login_required
+
import approver.utils as utils
- from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
-
+
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
- }
? --------
+ }
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
|
992525f8b371582598fa915128eccd3528e427a6
|
main.py
|
main.py
|
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
Fix config file path error on the server
|
Fix config file path error on the server
|
Python
|
mit
|
moreati/remarks,greatghoul/remarks,greatghoul/remarks,moreati/remarks,greatghoul/remarks,moreati/remarks
|
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
+ import os
app = Flask(__name__)
- app.config.from_pyfile('config.cfg', silent=True)
+ app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
Fix config file path error on the server
|
## Code Before:
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
## Instruction:
Fix config file path error on the server
## Code After:
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
+ import os
app = Flask(__name__)
- app.config.from_pyfile('config.cfg', silent=True)
+ app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
6e89594c231698d4f20590e723a876699876fb52
|
utils/__init__.py
|
utils/__init__.py
|
from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
Make Pylint ignore some unused imports
|
Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work
|
Python
|
mit
|
wolfy1339/Python-IRC-Bot
|
from . import util
from . import irc
+ # pylint: enable=unused-import
+
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
Make Pylint ignore some unused imports
|
## Code Before:
from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
## Instruction:
Make Pylint ignore some unused imports
## Code After:
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
from . import util
from . import irc
+ # pylint: enable=unused-import
+
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
c044c804633612608bbbf61621551d892483f179
|
yaspin/spinners.py
|
yaspin/spinners.py
|
import codecs
import os
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
with codecs.open(SPINNERS_PATH, encoding="utf-8") as f:
Spinners = json.load(f, object_hook=_hook)
|
import codecs
import pkgutil
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
SPINNERS_DATA = pkgutil.get_data(__package__, "data/spinners.json").decode("utf-8")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
Spinners = json.loads(SPINNERS_DATA, object_hook=_hook)
|
Allow use inside zip bundled package
|
Allow use inside zip bundled package
|
Python
|
mit
|
pavdmyt/yaspin
|
import codecs
- import os
+ import pkgutil
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
- SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json")
+ SPINNERS_DATA = pkgutil.get_data(__package__, "data/spinners.json").decode("utf-8")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
- with codecs.open(SPINNERS_PATH, encoding="utf-8") as f:
- Spinners = json.load(f, object_hook=_hook)
+ Spinners = json.loads(SPINNERS_DATA, object_hook=_hook)
|
Allow use inside zip bundled package
|
## Code Before:
import codecs
import os
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
with codecs.open(SPINNERS_PATH, encoding="utf-8") as f:
Spinners = json.load(f, object_hook=_hook)
## Instruction:
Allow use inside zip bundled package
## Code After:
import codecs
import pkgutil
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
SPINNERS_DATA = pkgutil.get_data(__package__, "data/spinners.json").decode("utf-8")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
Spinners = json.loads(SPINNERS_DATA, object_hook=_hook)
|
import codecs
- import os
+ import pkgutil
from collections import namedtuple
try:
import simplejson as json
except ImportError:
import json
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
- SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json")
+ SPINNERS_DATA = pkgutil.get_data(__package__, "data/spinners.json").decode("utf-8")
def _hook(dct):
return namedtuple("Spinner", dct.keys())(*dct.values())
- with codecs.open(SPINNERS_PATH, encoding="utf-8") as f:
- Spinners = json.load(f, object_hook=_hook)
? ---- ^
+ Spinners = json.loads(SPINNERS_DATA, object_hook=_hook)
? + ^^^^^^^^^^^^^
|
cad7093a3175868944acf1d2f62bad523e4f8a41
|
tests/unit/utils/test_thin.py
|
tests/unit/utils/test_thin.py
|
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
|
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
Add unit test for missing dependencies on get_ext_tops
|
Add unit test for missing dependencies on get_ext_tops
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
- import salt.utils.ssdp as ssdp
+ from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
+ class SaltSyetemExitException(Exception):
+ '''
+ System
+ '''
+ def __init__(self):
+ Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
+
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
- def test_get_tops(self):
+ @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
+ @patch('salt.utils.thin.log', MagicMock())
+ def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
+ cfg = [
+ {'namespace': {'path': '/foo', 'dependencies': []}},
+ ]
+ with pytest.raises(Exception) as err:
+ thin.get_ext_tops(cfg)
+ assert 'Dilithium Crystals' in str(err)
+ assert thin.log.error.called
+ assert 'Missing dependencies' in thin.log.error.call_args[0][0]
+ assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
Add unit test for missing dependencies on get_ext_tops
|
## Code Before:
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
import salt.utils.ssdp as ssdp
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
def test_get_tops(self):
'''
Test thin.get_tops
:return:
'''
## Instruction:
Add unit test for missing dependencies on get_ext_tops
## Code After:
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
class SaltSyetemExitException(Exception):
'''
System
'''
def __init__(self):
Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
@patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
@patch('salt.utils.thin.log', MagicMock())
def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
cfg = [
{'namespace': {'path': '/foo', 'dependencies': []}},
]
with pytest.raises(Exception) as err:
thin.get_ext_tops(cfg)
assert 'Dilithium Crystals' in str(err)
assert thin.log.error.called
assert 'Missing dependencies' in thin.log.error.call_args[0][0]
assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
'''
:codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salt.ext.six.moves import zip
from salt.ext import six
- import salt.utils.ssdp as ssdp
+ from salt.utils import thin
import salt.utils.stringutils
try:
import pytest
except ImportError:
pytest = None
+ class SaltSyetemExitException(Exception):
+ '''
+ System
+ '''
+ def __init__(self):
+ Exception.__init__(self, 'The Dilithium Crystals need to be rotated.')
+
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(pytest is None, 'PyTest is missing')
class SSHThinTestCase(TestCase):
'''
TestCase for SaltSSH-related parts.
'''
- def test_get_tops(self):
+ @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException))
+ @patch('salt.utils.thin.log', MagicMock())
+ def test_get_ext_tops_cfg_missing_dependencies(self):
'''
Test thin.get_tops
:return:
'''
+ cfg = [
+ {'namespace': {'path': '/foo', 'dependencies': []}},
+ ]
+ with pytest.raises(Exception) as err:
+ thin.get_ext_tops(cfg)
+ assert 'Dilithium Crystals' in str(err)
+ assert thin.log.error.called
+ assert 'Missing dependencies' in thin.log.error.call_args[0][0]
+ assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
|
e37c7cace441e837120b820936c6f4ae8de78996
|
sts/controller_manager.py
|
sts/controller_manager.py
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
return self.uuid2controller.values()
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
for c in self.live_controllers:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
cs = self.uuid2controller.values()
cs.sort(key=lambda c: c.uuid)
return cs
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
live = list(self.live_controllers)
live.sort(key=lambda c: c.uuid)
for c in live:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
Make .contollers() deterministic (was using hash.values())
|
Make .contollers() deterministic (was using hash.values())
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,ucb-sts/sts,jmiserez/sts
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
- return self.uuid2controller.values()
+ cs = self.uuid2controller.values()
+ cs.sort(key=lambda c: c.uuid)
+ return cs
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
- for c in self.live_controllers:
+ live = list(self.live_controllers)
+ live.sort(key=lambda c: c.uuid)
+ for c in live:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
Make .contollers() deterministic (was using hash.values())
|
## Code Before:
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
return self.uuid2controller.values()
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
for c in self.live_controllers:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
## Instruction:
Make .contollers() deterministic (was using hash.values())
## Code After:
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
cs = self.uuid2controller.values()
cs.sort(key=lambda c: c.uuid)
return cs
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
live = list(self.live_controllers)
live.sort(key=lambda c: c.uuid)
for c in live:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
- return self.uuid2controller.values()
? ^^^^^^
+ cs = self.uuid2controller.values()
? ^^^^^
+ cs.sort(key=lambda c: c.uuid)
+ return cs
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
- for c in self.live_controllers:
? ^^^ ^ ^^ ^
+ live = list(self.live_controllers)
? ^^^^ ^ + ^^^ ^
+ live.sort(key=lambda c: c.uuid)
+ for c in live:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
63c640f2d16b033cc8dff426768cd1c6cbaa5626
|
Lib/distutils/__init__.py
|
Lib/distutils/__init__.py
|
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
|
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <[email protected]> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
Revert to having static version numbers again.
|
Revert to having static version numbers again.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
- import sys
- __version__ = "%d.%d.%d" % sys.version_info[:3]
- del sys
+ # Distutils version
+ #
+ # Please coordinate with Marc-Andre Lemburg <[email protected]> when adding
+ # new features to distutils that would warrant bumping the version number.
+ #
+ # In general, major and minor version should loosely follow the Python
+ # version number the distutils code was shipped with.
+ #
+ __version__ = "2.5.0"
|
Revert to having static version numbers again.
|
## Code Before:
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
## Instruction:
Revert to having static version numbers again.
## Code After:
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <[email protected]> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
- import sys
- __version__ = "%d.%d.%d" % sys.version_info[:3]
- del sys
+ # Distutils version
+ #
+ # Please coordinate with Marc-Andre Lemburg <[email protected]> when adding
+ # new features to distutils that would warrant bumping the version number.
+ #
+ # In general, major and minor version should loosely follow the Python
+ # version number the distutils code was shipped with.
+ #
+ __version__ = "2.5.0"
|
c1785e0713a5af6b849baaa1b314a13ac777f3f5
|
tests/test_str_py3.py
|
tests/test_str_py3.py
|
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
import fastavro
from fastavro.compat import BytesIO
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
|
Test files shouldn't import 'fastavro.compat'. Just import BytesIO
manually.
|
Python
|
mit
|
e-heller/fastavro,e-heller/fastavro
|
+ """Python3 string tests for fastavro"""
+
+ from __future__ import absolute_import
+
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
+ try:
+ from cStringIO import StringIO as BytesIO
+ except ImportError:
+ from io import BytesIO
+
import fastavro
- from fastavro.compat import BytesIO
+
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
+
if __name__ == '__main__':
test_str_py3()
|
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
|
## Code Before:
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
import fastavro
from fastavro.compat import BytesIO
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
## Instruction:
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
## Code After:
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
+ """Python3 string tests for fastavro"""
+
+ from __future__ import absolute_import
+
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
+ try:
+ from cStringIO import StringIO as BytesIO
+ except ImportError:
+ from io import BytesIO
+
import fastavro
- from fastavro.compat import BytesIO
+
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
+
if __name__ == '__main__':
test_str_py3()
|
6e32cfd9b2640b4f119a3a8e4138c883fd4bcef0
|
_tests/test_scikit_ci_addons.py
|
_tests/test_scikit_ci_addons.py
|
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
expected_path = ci_addons.home() + '/' + addon
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
assert 'anyci/noop.py' in output_lines
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
assert ci_addons.home() + '/anyci/noop.py foo bar' in output_lines
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
|
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
expected_path = os.path.join(ci_addons.home(), addon)
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
assert 'anyci' + os.path.sep + 'noop.py' in output_lines
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
assert os.path.join(ci_addons.home(), 'anyci/noop.py foo bar') in output_lines
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
|
Fix failing tests on appveyor
|
ci: Fix failing tests on appveyor
|
Python
|
apache-2.0
|
scikit-build/scikit-ci-addons,scikit-build/scikit-ci-addons
|
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
- expected_path = ci_addons.home() + '/' + addon
+ expected_path = os.path.join(ci_addons.home(), addon)
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
- assert 'anyci/noop.py' in output_lines
+ assert 'anyci' + os.path.sep + 'noop.py' in output_lines
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
- assert ci_addons.home() + '/anyci/noop.py foo bar' in output_lines
+ assert os.path.join(ci_addons.home(), 'anyci/noop.py foo bar') in output_lines
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
|
Fix failing tests on appveyor
|
## Code Before:
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
expected_path = ci_addons.home() + '/' + addon
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
assert 'anyci/noop.py' in output_lines
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
assert ci_addons.home() + '/anyci/noop.py foo bar' in output_lines
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
## Instruction:
Fix failing tests on appveyor
## Code After:
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
expected_path = os.path.join(ci_addons.home(), addon)
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
assert 'anyci' + os.path.sep + 'noop.py' in output_lines
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
assert os.path.join(ci_addons.home(), 'anyci/noop.py foo bar') in output_lines
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
|
import ci_addons
import os
import pytest
import subprocess
from . import captured_lines
def test_home():
expected_home = os.path.abspath(os.path.dirname(__file__) + '/..')
assert ci_addons.home() == expected_home
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_path(addon):
- expected_path = ci_addons.home() + '/' + addon
? ^^^^^^^^
+ expected_path = os.path.join(ci_addons.home(), addon)
? +++++++++++++ ^ +
if not addon.endswith('.py'):
expected_path += '.py'
assert ci_addons.path(addon) == expected_path
def test_list(capsys):
ci_addons.list_addons()
output_lines, _ = captured_lines(capsys)
- assert 'anyci/noop.py' in output_lines
? ^
+ assert 'anyci' + os.path.sep + 'noop.py' in output_lines
? ^^^^^^^^^^^^^^^^^^^
@pytest.mark.parametrize("addon", ['anyci/noop', 'anyci/noop.py'])
def test_execute(addon, capfd):
ci_addons.execute(addon, ['foo', 'bar'])
output_lines, _ = captured_lines(capfd)
- assert ci_addons.home() + '/anyci/noop.py foo bar' in output_lines
? ^^ -
+ assert os.path.join(ci_addons.home(), 'anyci/noop.py foo bar') in output_lines
? +++++++++++++ ^ +
def test_cli():
root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
environment = dict(os.environ)
environment['PYTHONPATH'] = root
subprocess.check_call(
"python -m ci_addons",
shell=True,
env=environment,
stderr=subprocess.STDOUT,
cwd=str(root)
)
|
37eeecd3d4d1e6d2972565961b5c31731ae55ec7
|
tests/tester.py
|
tests/tester.py
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
Test the an empty document results in a test failure.
|
Test the an empty document results in a test failure.
|
Python
|
mit
|
bnkr/servequnit,bnkr/servequnit,bnkr/servequnit,bnkr/selenit,bnkr/selenit
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
- passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
+ test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
+ factory = ServerFactory().bind_script("test", test_file)
- factory = ServerFactory()
- factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
- passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
+ test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
+ factory = ServerFactory().bind_script("test", test_file)
- factory = ServerFactory()
- factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
+ def test_failing_test_reports_no_tests(self):
+ factory = ServerFactory().bind_script("test", "/dev/null")
+ with factory.run() as server:
+ tester = self._make_tester(server)
+ self.assertRaises(TestFailedError, tester.run)
+
|
Test the an empty document results in a test failure.
|
## Code Before:
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
## Instruction:
Test the an empty document results in a test failure.
## Code After:
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
- passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
? ^^ ^ ^^
+ test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
? ^^ ^^^ ^^
+ factory = ServerFactory().bind_script("test", test_file)
- factory = ServerFactory()
- factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
- passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
? ^^ ^ ^^
+ test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
? ^^ ^^^ ^^
+ factory = ServerFactory().bind_script("test", test_file)
- factory = ServerFactory()
- factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
+
+ def test_failing_test_reports_no_tests(self):
+ factory = ServerFactory().bind_script("test", "/dev/null")
+ with factory.run() as server:
+ tester = self._make_tester(server)
+ self.assertRaises(TestFailedError, tester.run)
|
d1330eb44b1842571ff7deacef175b9aa92e09c0
|
openacademy/models/res_partner.py
|
openacademy/models/res_partner.py
|
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
instructor = fields.Boolean(help="This partner give train our course")
|
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
instructor = fields.Boolean(default=False,help="This partner give train our course")
sessions = fields.Many2many('session', string="Session as instructor", readonly=True)
|
Add partner view injerit and model inherit
|
[REF] openacademy: Add partner view injerit and model inherit
|
Python
|
apache-2.0
|
juancr83/DockerOpenacademy-proyect
|
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
- instructor = fields.Boolean(help="This partner give train our course")
+ instructor = fields.Boolean(default=False,help="This partner give train our course")
+ sessions = fields.Many2many('session', string="Session as instructor", readonly=True)
|
Add partner view injerit and model inherit
|
## Code Before:
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
instructor = fields.Boolean(help="This partner give train our course")
## Instruction:
Add partner view injerit and model inherit
## Code After:
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
instructor = fields.Boolean(default=False,help="This partner give train our course")
sessions = fields.Many2many('session', string="Session as instructor", readonly=True)
|
from openerp import models, fields
class Partner(models.Model):
_inherit = 'res.partner'
- instructor = fields.Boolean(help="This partner give train our course")
+ instructor = fields.Boolean(default=False,help="This partner give train our course")
? ++++++++++++++
+ sessions = fields.Many2many('session', string="Session as instructor", readonly=True)
|
bf5307afe52415960d0ffc794f687b0ecebb48da
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
from app import views, models
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
##################
# Logging system #
##################
# Open a file rotated every 100MB
file_handler = RotatingFileHandler('tmp/cineapp.log', 'a', 100 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Cineapp startup')
from app import views, models
|
Enable file logging for the application.
|
Enable file logging for the application.
|
Python
|
mit
|
ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
+ import logging
+ from logging.handlers import RotatingFileHandler
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
+ ##################
+ # Logging system #
+ ##################
+
+ # Open a file rotated every 100MB
+ file_handler = RotatingFileHandler('tmp/cineapp.log', 'a', 100 * 1024 * 1024, 10)
+ file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
+ app.logger.setLevel(logging.INFO)
+ app.logger.addHandler(file_handler)
+ app.logger.info('Cineapp startup')
+
from app import views, models
|
Enable file logging for the application.
|
## Code Before:
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
from app import views, models
## Instruction:
Enable file logging for the application.
## Code After:
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
##################
# Logging system #
##################
# Open a file rotated every 100MB
file_handler = RotatingFileHandler('tmp/cineapp.log', 'a', 100 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Cineapp startup')
from app import views, models
|
from flask import Flask
from flask.ext.login import login_user, logout_user, current_user, login_required, LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
from flask import Flask, session
from flask.ext.session import Session
from flask.ext.mail import Mail
+ import logging
+ from logging.handlers import RotatingFileHandler
app = Flask(__name__)
# Configuration file reading
app.config.from_object('config')
# Database Initialization
db = SQLAlchemy(app)
# Login manager init
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Session Manager Init
sess = Session()
sess.init_app(app)
# Mail engine init
mail = Mail(app)
+ ##################
+ # Logging system #
+ ##################
+
+ # Open a file rotated every 100MB
+ file_handler = RotatingFileHandler('tmp/cineapp.log', 'a', 100 * 1024 * 1024, 10)
+ file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
+ app.logger.setLevel(logging.INFO)
+ app.logger.addHandler(file_handler)
+ app.logger.info('Cineapp startup')
+
from app import views, models
|
2ba28c83de33ebc75f386d127d0c55e17248a94b
|
mapclientplugins/meshgeneratorstep/__init__.py
|
mapclientplugins/meshgeneratorstep/__init__.py
|
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = ''
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
|
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = 'https://github.com/ABI-Software/mapclientplugins.meshgeneratorstep'
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
|
Add location to step metadata.
|
Add location to step metadata.
|
Python
|
apache-2.0
|
rchristie/mapclientplugins.meshgeneratorstep
|
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
- __location__ = ''
+ __location__ = 'https://github.com/ABI-Software/mapclientplugins.meshgeneratorstep'
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
+
|
Add location to step metadata.
|
## Code Before:
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = ''
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
## Instruction:
Add location to step metadata.
## Code After:
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = 'https://github.com/ABI-Software/mapclientplugins.meshgeneratorstep'
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
|
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
- __location__ = ''
+ __location__ = 'https://github.com/ABI-Software/mapclientplugins.meshgeneratorstep'
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
|
fb9591c4a2801bfe5f5380c3e33aa44a25db3591
|
customforms/models.py
|
customforms/models.py
|
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
Add absolute URLs to form and question admin
|
Add absolute URLs to form and question admin
|
Python
|
apache-2.0
|
cschwede/django-customforms
|
+ from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
+ def get_absolute_url(self):
+ return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
+ def get_absolute_url(self):
+ return reverse('customforms.views.view_form', args=[str(self.form.id)])
+
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
Add absolute URLs to form and question admin
|
## Code Before:
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
## Instruction:
Add absolute URLs to form and question admin
## Code After:
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
+ from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
+ def get_absolute_url(self):
+ return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
+ def get_absolute_url(self):
+ return reverse('customforms.views.view_form', args=[str(self.form.id)])
+
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
af91b7c2612fab598ba50c0c0256f7e552098d92
|
reportlab/docs/genAll.py
|
reportlab/docs/genAll.py
|
"""Runs the three manual-building scripts"""
if __name__=='__main__':
import os, sys
d = os.path.dirname(sys.argv[0])
#need a quiet mode for the test suite
if '-s' in sys.argv: # 'silent
quiet = '-s'
else:
quiet = ''
if not d: d = '.'
if not os.path.isabs(d):
d = os.path.normpath(os.path.join(os.getcwd(),d))
for p in ('reference/genreference.py',
'userguide/genuserguide.py',
'graphguide/gengraphguide.py',
'../tools/docco/graphdocpy.py'):
os.chdir(d)
os.chdir(os.path.dirname(p))
os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
|
import os
def _genAll(d=None,quiet=''):
if not d: d = '.'
if not os.path.isabs(d):
d = os.path.normpath(os.path.join(os.getcwd(),d))
for p in ('reference/genreference.py',
'userguide/genuserguide.py',
'graphguide/gengraphguide.py',
'../tools/docco/graphdocpy.py'):
os.chdir(d)
os.chdir(os.path.dirname(p))
os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
"""Runs the manual-building scripts"""
if __name__=='__main__':
import sys
#need a quiet mode for the test suite
if '-s' in sys.argv: # 'silent
quiet = '-s'
else:
quiet = ''
_genAll(os.path.dirname(sys.argv[0]),quiet)
|
Allow for use in daily.py
|
Allow for use in daily.py
|
Python
|
bsd-3-clause
|
makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile
|
+ import os
+ def _genAll(d=None,quiet=''):
+ if not d: d = '.'
+ if not os.path.isabs(d):
+ d = os.path.normpath(os.path.join(os.getcwd(),d))
+ for p in ('reference/genreference.py',
+ 'userguide/genuserguide.py',
+ 'graphguide/gengraphguide.py',
+ '../tools/docco/graphdocpy.py'):
+ os.chdir(d)
+ os.chdir(os.path.dirname(p))
+ os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
+
- """Runs the three manual-building scripts"""
+ """Runs the manual-building scripts"""
if __name__=='__main__':
- import os, sys
- d = os.path.dirname(sys.argv[0])
+ import sys
+ #need a quiet mode for the test suite
+ if '-s' in sys.argv: # 'silent
+ quiet = '-s'
+ else:
+ quiet = ''
+ _genAll(os.path.dirname(sys.argv[0]),quiet)
- #need a quiet mode for the test suite
- if '-s' in sys.argv: # 'silent
- quiet = '-s'
- else:
- quiet = ''
-
- if not d: d = '.'
- if not os.path.isabs(d):
- d = os.path.normpath(os.path.join(os.getcwd(),d))
- for p in ('reference/genreference.py',
- 'userguide/genuserguide.py',
- 'graphguide/gengraphguide.py',
- '../tools/docco/graphdocpy.py'):
- os.chdir(d)
- os.chdir(os.path.dirname(p))
- os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
-
|
Allow for use in daily.py
|
## Code Before:
"""Runs the three manual-building scripts"""
if __name__=='__main__':
import os, sys
d = os.path.dirname(sys.argv[0])
#need a quiet mode for the test suite
if '-s' in sys.argv: # 'silent
quiet = '-s'
else:
quiet = ''
if not d: d = '.'
if not os.path.isabs(d):
d = os.path.normpath(os.path.join(os.getcwd(),d))
for p in ('reference/genreference.py',
'userguide/genuserguide.py',
'graphguide/gengraphguide.py',
'../tools/docco/graphdocpy.py'):
os.chdir(d)
os.chdir(os.path.dirname(p))
os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
## Instruction:
Allow for use in daily.py
## Code After:
import os
def _genAll(d=None,quiet=''):
if not d: d = '.'
if not os.path.isabs(d):
d = os.path.normpath(os.path.join(os.getcwd(),d))
for p in ('reference/genreference.py',
'userguide/genuserguide.py',
'graphguide/gengraphguide.py',
'../tools/docco/graphdocpy.py'):
os.chdir(d)
os.chdir(os.path.dirname(p))
os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
"""Runs the manual-building scripts"""
if __name__=='__main__':
import sys
#need a quiet mode for the test suite
if '-s' in sys.argv: # 'silent
quiet = '-s'
else:
quiet = ''
_genAll(os.path.dirname(sys.argv[0]),quiet)
|
+ import os
+ def _genAll(d=None,quiet=''):
+ if not d: d = '.'
+ if not os.path.isabs(d):
+ d = os.path.normpath(os.path.join(os.getcwd(),d))
+ for p in ('reference/genreference.py',
+ 'userguide/genuserguide.py',
+ 'graphguide/gengraphguide.py',
+ '../tools/docco/graphdocpy.py'):
+ os.chdir(d)
+ os.chdir(os.path.dirname(p))
+ os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
+
- """Runs the three manual-building scripts"""
? ------
+ """Runs the manual-building scripts"""
if __name__=='__main__':
+ import sys
- import os, sys
- d = os.path.dirname(sys.argv[0])
-
- #need a quiet mode for the test suite
? ^^^^ ^^^
+ #need a quiet mode for the test suite
? ^ ^
- if '-s' in sys.argv: # 'silent
? ^^^^
+ if '-s' in sys.argv: # 'silent
? ^
+ quiet = '-s'
+ else:
+ quiet = ''
+ _genAll(os.path.dirname(sys.argv[0]),quiet)
- quiet = '-s'
- else:
- quiet = ''
-
- if not d: d = '.'
- if not os.path.isabs(d):
- d = os.path.normpath(os.path.join(os.getcwd(),d))
- for p in ('reference/genreference.py',
- 'userguide/genuserguide.py',
- 'graphguide/gengraphguide.py',
- '../tools/docco/graphdocpy.py'):
- os.chdir(d)
- os.chdir(os.path.dirname(p))
- os.system('%s %s %s' % (sys.executable,os.path.basename(p), quiet))
|
3b9d15fcedd5edbe6dcf8ad58e9dbee0cecb6a04
|
sentry/core/processors.py
|
sentry/core/processors.py
|
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
from pprint import pprint
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
print frame['vars']
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
frame['vars'][k] = '*'*len(v)
return data
#class SantizePasswordsProcessor(Processor):
|
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return data
#class SantizePasswordsProcessor(Processor):
|
Remove print statement and change mask to use a fixed length
|
Remove print statement and change mask to use a fixed length
|
Python
|
bsd-3-clause
|
dcramer/sentry-old,dcramer/sentry-old,dcramer/sentry-old
|
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
- from pprint import pprint
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
- print frame['vars']
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
+ # store mask as a fixed length for security
- frame['vars'][k] = '*'*len(v)
+ frame['vars'][k] = '*'*16
return data
#class SantizePasswordsProcessor(Processor):
|
Remove print statement and change mask to use a fixed length
|
## Code Before:
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
from pprint import pprint
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
print frame['vars']
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
frame['vars'][k] = '*'*len(v)
return data
#class SantizePasswordsProcessor(Processor):
## Instruction:
Remove print statement and change mask to use a fixed length
## Code After:
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return data
#class SantizePasswordsProcessor(Processor):
|
class Processor(object):
def process(self, data):
resp = self.get_data(data)
if resp:
data['extra'].update(resp)
return data
def get_data(self, data):
return {}
- from pprint import pprint
def sanitize_passwords_processor(data):
""" Asterisk out passwords from password fields in frames.
"""
if 'sentry.interfaces.Exception' in data:
if 'frames' in data['sentry.interfaces.Exception']:
for frame in data['sentry.interfaces.Exception']['frames']:
if 'vars' in frame:
- print frame['vars']
for k,v in frame['vars'].iteritems():
if k.startswith('password'):
+ # store mask as a fixed length for security
- frame['vars'][k] = '*'*len(v)
? ^^^^^^
+ frame['vars'][k] = '*'*16
? ^^
return data
#class SantizePasswordsProcessor(Processor):
|
c5996b4a933f2d27251e8d85f3392b715e130759
|
mapentity/templatetags/convert_tags.py
|
mapentity/templatetags/convert_tags.py
|
import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
Support conversion format as extension, instead of mimetype
|
Support conversion format as extension, instead of mimetype
|
Python
|
bsd-3-clause
|
Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,Anaethelion/django-mapentity
|
import urllib
+ from mimetypes import types_map
+
from django import template
from django.conf import settings
+
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
+ if '/' not in format:
+ extension = '.' + format if not format.startswith('.') else format
+ format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
Support conversion format as extension, instead of mimetype
|
## Code Before:
import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
## Instruction:
Support conversion format as extension, instead of mimetype
## Code After:
import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
import urllib
+ from mimetypes import types_map
+
from django import template
from django.conf import settings
+
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
+ if '/' not in format:
+ extension = '.' + format if not format.startswith('.') else format
+ format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
3bcd36a063b112edb657a739287c6a2db3141746
|
appolo/models.py
|
appolo/models.py
|
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
|
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
class Meta:
verbose_name_plural = 'locaties'
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
class Meta:
verbose_name_plural = 'dagen'
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
class Meta:
verbose_name_plural = 'activiteiten'
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
class Meta:
verbose_name_plural = 'nieuwsitems'
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
class Meta:
verbose_name_plural = 'hashtags'
|
Correct meervoud modellen van appolo
|
Correct meervoud modellen van appolo
|
Python
|
mit
|
jonge-democraten/zues,jonge-democraten/zues,jonge-democraten/zues
|
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
+ class Meta:
+ verbose_name_plural = 'locaties'
+
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
+
+ class Meta:
+ verbose_name_plural = 'dagen'
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
+ class Meta:
+ verbose_name_plural = 'activiteiten'
+
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
+ class Meta:
+ verbose_name_plural = 'nieuwsitems'
+
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
+ class Meta:
+ verbose_name_plural = 'hashtags'
+
|
Correct meervoud modellen van appolo
|
## Code Before:
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
## Instruction:
Correct meervoud modellen van appolo
## Code After:
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
class Meta:
verbose_name_plural = 'locaties'
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
class Meta:
verbose_name_plural = 'dagen'
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
class Meta:
verbose_name_plural = 'activiteiten'
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
class Meta:
verbose_name_plural = 'nieuwsitems'
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
class Meta:
verbose_name_plural = 'hashtags'
|
from django.db import models
class Locatie(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
+ class Meta:
+ verbose_name_plural = 'locaties'
+
class Dag(models.Model):
def __unicode__(self):
return unicode(self.datum)
datum = models.DateField()
+
+ class Meta:
+ verbose_name_plural = 'dagen'
class Activiteit(models.Model):
def __unicode__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
dag = models.ForeignKey(Dag)
locatie = models.ForeignKey(Locatie)
+ class Meta:
+ verbose_name_plural = 'activiteiten'
+
class Nieuwsitem(models.Model):
def __unicode__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
+ class Meta:
+ verbose_name_plural = 'nieuwsitems'
+
class Hashtag(models.Model):
def __unicode__(self):
return self.tekst
tekst = models.CharField(max_length=200)
+
+ class Meta:
+ verbose_name_plural = 'hashtags'
|
eca0f263e8a944a144a08f130e06aeb651e645b4
|
social/apps/django_app/urls.py
|
social/apps/django_app/urls.py
|
"""URLs module"""
from django.conf import settings
try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import patterns, url
from social.utils import setting_name
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
urlpatterns = patterns('social.apps.django_app.views',
# authentication / association
url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), 'auth',
name='begin'),
url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), 'complete',
name='complete'),
# disconnection
url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), 'disconnect',
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
.format(extra), 'disconnect', name='disconnect_individual'),
)
|
"""URLs module"""
from django.conf import settings
try:
from django.conf.urls import url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import url
from social.utils import setting_name
from social.apps.django_app import views
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
urlpatterns = [
# authentication / association
url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), views.auth,
name='begin'),
url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), views.complete,
name='complete'),
# disconnection
url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), views.disconnect,
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
.format(extra), views.disconnect, name='disconnect_individual'),
]
|
Fix Django 1.10 deprecation warnings
|
Fix Django 1.10 deprecation warnings
In django_app/urls.py:
* Use a list instead of `patterns`
* Use view callables instead of strings
Fixes #804, #754
|
Python
|
bsd-3-clause
|
tkajtoch/python-social-auth,cjltsod/python-social-auth,python-social-auth/social-core,S01780/python-social-auth,tobias47n9e/social-core,fearlessspider/python-social-auth,tkajtoch/python-social-auth,python-social-auth/social-app-cherrypy,python-social-auth/social-app-django,fearlessspider/python-social-auth,python-social-auth/social-app-django,python-social-auth/social-core,cjltsod/python-social-auth,merutak/python-social-auth,python-social-auth/social-docs,fearlessspider/python-social-auth,merutak/python-social-auth,webjunkie/python-social-auth,python-social-auth/social-app-django,tkajtoch/python-social-auth,rsalmaso/python-social-auth,S01780/python-social-auth,webjunkie/python-social-auth,merutak/python-social-auth,webjunkie/python-social-auth,rsalmaso/python-social-auth,S01780/python-social-auth,python-social-auth/social-storage-sqlalchemy
|
"""URLs module"""
from django.conf import settings
try:
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
except ImportError:
# Django < 1.4
- from django.conf.urls.defaults import patterns, url
+ from django.conf.urls.defaults import url
from social.utils import setting_name
+ from social.apps.django_app import views
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
- urlpatterns = patterns('social.apps.django_app.views',
+ urlpatterns = [
# authentication / association
- url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), 'auth',
+ url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), views.auth,
name='begin'),
- url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), 'complete',
+ url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), views.complete,
name='complete'),
# disconnection
- url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), 'disconnect',
+ url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), views.disconnect,
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
- .format(extra), 'disconnect', name='disconnect_individual'),
+ .format(extra), views.disconnect, name='disconnect_individual'),
- )
+ ]
|
Fix Django 1.10 deprecation warnings
|
## Code Before:
"""URLs module"""
from django.conf import settings
try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import patterns, url
from social.utils import setting_name
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
urlpatterns = patterns('social.apps.django_app.views',
# authentication / association
url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), 'auth',
name='begin'),
url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), 'complete',
name='complete'),
# disconnection
url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), 'disconnect',
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
.format(extra), 'disconnect', name='disconnect_individual'),
)
## Instruction:
Fix Django 1.10 deprecation warnings
## Code After:
"""URLs module"""
from django.conf import settings
try:
from django.conf.urls import url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import url
from social.utils import setting_name
from social.apps.django_app import views
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
urlpatterns = [
# authentication / association
url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), views.auth,
name='begin'),
url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), views.complete,
name='complete'),
# disconnection
url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), views.disconnect,
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
.format(extra), views.disconnect, name='disconnect_individual'),
]
|
"""URLs module"""
from django.conf import settings
try:
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
except ImportError:
# Django < 1.4
- from django.conf.urls.defaults import patterns, url
? ----------
+ from django.conf.urls.defaults import url
from social.utils import setting_name
+ from social.apps.django_app import views
extra = getattr(settings, setting_name('TRAILING_SLASH'), True) and '/' or ''
- urlpatterns = patterns('social.apps.django_app.views',
+ urlpatterns = [
# authentication / association
- url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), 'auth',
? ^ -
+ url(r'^login/(?P<backend>[^/]+){0}$'.format(extra), views.auth,
? ^^^^^^
name='begin'),
- url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), 'complete',
? ^ -
+ url(r'^complete/(?P<backend>[^/]+){0}$'.format(extra), views.complete,
? ^^^^^^
name='complete'),
# disconnection
- url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), 'disconnect',
? ^ -
+ url(r'^disconnect/(?P<backend>[^/]+){0}$'.format(extra), views.disconnect,
? ^^^^^^
name='disconnect'),
url(r'^disconnect/(?P<backend>[^/]+)/(?P<association_id>[^/]+){0}$'
- .format(extra), 'disconnect', name='disconnect_individual'),
? ---- ^ -
+ .format(extra), views.disconnect, name='disconnect_individual'),
? ^^^^^^
- )
+ ]
|
c61929f0d0d8dbf53ef3c9ff2a98cf8f249bfca4
|
handlers/base_handler.py
|
handlers/base_handler.py
|
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
if offset < 0:
raise IndexError("File offset must be greater than 0")
if offset + size >= len(self.file):
raise IndexError("Cannot read beyond the end of the file")
return self.file[offset:offset + size]
|
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
return self.file[offset:offset + size]
|
Revert "Add bounds checking to BaseHandler.read()"
|
Revert "Add bounds checking to BaseHandler.read()"
This reverts commit 045ead44ef69d6ebf2cb0dddf084762efcc62995.
|
Python
|
mit
|
drx/rom-info
|
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
- if offset < 0:
- raise IndexError("File offset must be greater than 0")
-
- if offset + size >= len(self.file):
- raise IndexError("Cannot read beyond the end of the file")
-
return self.file[offset:offset + size]
|
Revert "Add bounds checking to BaseHandler.read()"
|
## Code Before:
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
if offset < 0:
raise IndexError("File offset must be greater than 0")
if offset + size >= len(self.file):
raise IndexError("Cannot read beyond the end of the file")
return self.file[offset:offset + size]
## Instruction:
Revert "Add bounds checking to BaseHandler.read()"
## Code After:
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
return self.file[offset:offset + size]
|
from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
- if offset < 0:
- raise IndexError("File offset must be greater than 0")
-
- if offset + size >= len(self.file):
- raise IndexError("Cannot read beyond the end of the file")
-
return self.file[offset:offset + size]
|
8840cedd74c6c1959358366a88a85e7567b84439
|
tests/test_vector2_negation.py
|
tests/test_vector2_negation.py
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
Test that negation is involutive
|
tests/negation: Test that negation is involutive
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
+ @given(vector=vectors())
+ def test_negation_involutive(vector: Vector2):
+ assert vector == - (- vector)
+
|
Test that negation is involutive
|
## Code Before:
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
## Instruction:
Test that negation is involutive
## Code After:
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
+
+ @given(vector=vectors())
+ def test_negation_involutive(vector: Vector2):
+ assert vector == - (- vector)
|
6dfa381b26948b97b7abc3de9f1a02618fd5ad0f
|
src/geoserver/style.py
|
src/geoserver/style.py
|
from geoserver.support import ResourceInfo, atom_link
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def __repr__(self):
return "Style[%s]" % self.name
|
from geoserver.support import ResourceInfo, atom_link
import re
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def body_href(self):
style_container = re.sub(r"/rest$", "/styles", self.catalog.service_url)
return "%s/%s" % (style_container, self.filename)
def __repr__(self):
return "Style[%s]" % self.name
|
Add body_href method for getting a public url for a Style's body.
|
Add body_href method for getting a public url for a Style's body.
|
Python
|
mit
|
cristianzamar/gsconfig,scottp-dpaw/gsconfig,boundlessgeo/gsconfig,Geode/gsconfig,afabiani/gsconfig,garnertb/gsconfig.py
|
from geoserver.support import ResourceInfo, atom_link
+ import re
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
+ def body_href(self):
+ style_container = re.sub(r"/rest$", "/styles", self.catalog.service_url)
+ return "%s/%s" % (style_container, self.filename)
+
def __repr__(self):
return "Style[%s]" % self.name
|
Add body_href method for getting a public url for a Style's body.
|
## Code Before:
from geoserver.support import ResourceInfo, atom_link
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def __repr__(self):
return "Style[%s]" % self.name
## Instruction:
Add body_href method for getting a public url for a Style's body.
## Code After:
from geoserver.support import ResourceInfo, atom_link
import re
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def body_href(self):
style_container = re.sub(r"/rest$", "/styles", self.catalog.service_url)
return "%s/%s" % (style_container, self.filename)
def __repr__(self):
return "Style[%s]" % self.name
|
from geoserver.support import ResourceInfo, atom_link
+ import re
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
+ def body_href(self):
+ style_container = re.sub(r"/rest$", "/styles", self.catalog.service_url)
+ return "%s/%s" % (style_container, self.filename)
+
def __repr__(self):
return "Style[%s]" % self.name
|
279a7dfcdd854999d490164da3dc3790430e639a
|
membership/management/commands/public_memberlist.py
|
membership/management/commands/public_memberlist.py
|
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
|
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
Fix UnicodeDecodeError: Return text string, not bytes
|
Fix UnicodeDecodeError: Return text string, not bytes
|
Python
|
mit
|
kapsiry/sikteeri,AriMartti/sikteeri,kapsiry/sikteeri,kapsiry/sikteeri,annttu/sikteeri,joneskoo/sikteeri,annttu/sikteeri,AriMartti/sikteeri,kapsiry/sikteeri,joneskoo/sikteeri,AriMartti/sikteeri,annttu/sikteeri,joneskoo/sikteeri,annttu/sikteeri,AriMartti/sikteeri,joneskoo/sikteeri
|
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
- return render_to_string(template_name, data).encode('utf-8')
+ return render_to_string(template_name, data)
|
Fix UnicodeDecodeError: Return text string, not bytes
|
## Code Before:
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
## Instruction:
Fix UnicodeDecodeError: Return text string, not bytes
## Code After:
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
- return render_to_string(template_name, data).encode('utf-8')
? ----------------
+ return render_to_string(template_name, data)
|
cdd1f3410b8ae304485f7992ac6048e1277cffe1
|
parsedatetime/pdt_locales/__init__.py
|
parsedatetime/pdt_locales/__init__.py
|
try:
import PyICU as pyicu
except:
pyicu = None
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
|
import os
try:
import PyICU as pyicu
except:
pyicu = None
import yaml
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
pdtLocales = [
'icu',
'en_US',
'en_AU',
'es_ES',
'de_DE',
'nl_NL',
'ru_RU',
]
def load_yaml(path):
"""
Read yaml data from filepath
:param path:
:return:
"""
with open(path, 'r') as fio:
return yaml.load(fio.read())
def _get_yaml_path(locale):
"""
Return filepath of locale file
:param locale:
:return:
"""
return os.path.join(os.path.dirname(__file__), '%s.yaml' % locale)
def load_locale(locale):
"""
Return data of locale
:param locale:
:return:
"""
assert locale in pdtLocales, "The locale '%s' is not supported" % locale
_data_base = load_yaml(_get_yaml_path('base'))
return _data_base.update(**load_yaml(_get_yaml_path(locale)))
load_locale('ru_RU')
|
Add local locale from file
|
Add local locale from file
|
Python
|
apache-2.0
|
phoebebright/parsedatetime,bear/parsedatetime,idpaterson/parsedatetime
|
+ import os
try:
import PyICU as pyicu
except:
pyicu = None
+
+ import yaml
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
+ pdtLocales = [
+ 'icu',
+ 'en_US',
+ 'en_AU',
+ 'es_ES',
+ 'de_DE',
+ 'nl_NL',
+ 'ru_RU',
+ ]
+
+
+ def load_yaml(path):
+ """
+ Read yaml data from filepath
+ :param path:
+ :return:
+ """
+ with open(path, 'r') as fio:
+ return yaml.load(fio.read())
+
+
+ def _get_yaml_path(locale):
+ """
+ Return filepath of locale file
+ :param locale:
+ :return:
+ """
+ return os.path.join(os.path.dirname(__file__), '%s.yaml' % locale)
+
+
+ def load_locale(locale):
+ """
+ Return data of locale
+ :param locale:
+ :return:
+ """
+ assert locale in pdtLocales, "The locale '%s' is not supported" % locale
+ _data_base = load_yaml(_get_yaml_path('base'))
+ return _data_base.update(**load_yaml(_get_yaml_path(locale)))
+
+
+ load_locale('ru_RU')
+
|
Add local locale from file
|
## Code Before:
try:
import PyICU as pyicu
except:
pyicu = None
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
## Instruction:
Add local locale from file
## Code After:
import os
try:
import PyICU as pyicu
except:
pyicu = None
import yaml
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
pdtLocales = [
'icu',
'en_US',
'en_AU',
'es_ES',
'de_DE',
'nl_NL',
'ru_RU',
]
def load_yaml(path):
"""
Read yaml data from filepath
:param path:
:return:
"""
with open(path, 'r') as fio:
return yaml.load(fio.read())
def _get_yaml_path(locale):
"""
Return filepath of locale file
:param locale:
:return:
"""
return os.path.join(os.path.dirname(__file__), '%s.yaml' % locale)
def load_locale(locale):
"""
Return data of locale
:param locale:
:return:
"""
assert locale in pdtLocales, "The locale '%s' is not supported" % locale
_data_base = load_yaml(_get_yaml_path('base'))
return _data_base.update(**load_yaml(_get_yaml_path(locale)))
load_locale('ru_RU')
|
+ import os
try:
import PyICU as pyicu
except:
pyicu = None
+
+ import yaml
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
+
+ pdtLocales = [
+ 'icu',
+ 'en_US',
+ 'en_AU',
+ 'es_ES',
+ 'de_DE',
+ 'nl_NL',
+ 'ru_RU',
+ ]
+
+
+ def load_yaml(path):
+ """
+ Read yaml data from filepath
+ :param path:
+ :return:
+ """
+ with open(path, 'r') as fio:
+ return yaml.load(fio.read())
+
+
+ def _get_yaml_path(locale):
+ """
+ Return filepath of locale file
+ :param locale:
+ :return:
+ """
+ return os.path.join(os.path.dirname(__file__), '%s.yaml' % locale)
+
+
+ def load_locale(locale):
+ """
+ Return data of locale
+ :param locale:
+ :return:
+ """
+ assert locale in pdtLocales, "The locale '%s' is not supported" % locale
+ _data_base = load_yaml(_get_yaml_path('base'))
+ return _data_base.update(**load_yaml(_get_yaml_path(locale)))
+
+
+ load_locale('ru_RU')
|
899882be398f8a31e706a590c0a7e297c1589c25
|
threat_intel/util/error_messages.py
|
threat_intel/util/error_messages.py
|
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
Fix deprecation warning interfering with tests
|
Fix deprecation warning interfering with tests
|
Python
|
mit
|
Yelp/threat_intel,megancarney/threat_intel,SYNchroACK/threat_intel
|
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
- sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
+ sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
Fix deprecation warning interfering with tests
|
## Code Before:
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
## Instruction:
Fix deprecation warning interfering with tests
## Code After:
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
- sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
? -----------------------------
+ sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
? ++++ +
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
d60b0ee8c212728721f47cc57303ae24888cc387
|
models.py
|
models.py
|
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
return Markup(self.content)
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
|
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
return Markup(self.content.replace('\n', '<br>'))
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
|
Add support for carriage returns
|
Add support for carriage returns
|
Python
|
apache-2.0
|
agateau/tmc2,agateau/tmc2
|
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
- return Markup(self.content)
+ return Markup(self.content.replace('\n', '<br>'))
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
|
Add support for carriage returns
|
## Code Before:
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
return Markup(self.content)
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
## Instruction:
Add support for carriage returns
## Code After:
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
return Markup(self.content.replace('\n', '<br>'))
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
|
import datetime
import math
from flask import Markup
from peewee import Model, TextField, DateTimeField
from app import db
class Quote(Model):
content = TextField()
timestamp = DateTimeField(default=datetime.datetime.now)
class Meta:
database = db
def html(self):
- return Markup(self.content)
+ return Markup(self.content.replace('\n', '<br>'))
? +++++++++++++++++++++ +
@classmethod
def paged(cls, page, page_size):
quotes = Quote.select().order_by(Quote.timestamp.desc())
page_count = math.ceil(quotes.count() / page_size)
return quotes.offset(page * page_size).limit(page_size), page_count
|
bf36e307b13148d40e978ebb32151a3ea0e32cf9
|
stampman/tests/test_api.py
|
stampman/tests/test_api.py
|
import unittest
from stampman.services import pool
class TestAPIEndpoint(unittest.TestCase):
pass
|
import unittest
import json
import requests
from stampman import main
class TestAPIRoot(unittest.TestCase):
def setUp(self):
self._port = "8000"
self._path = "http://0.0.0.0"
main.app.config['TESTING'] = True
self._app = main.app.test_client()
def testGetJson(self):
response = self._app.get("/")
expected_response = [
{
"services": [
{
"name": "mailgun",
"priority": 2
},
{
"name": "sendgrid",
"priority": 1
}
],
"url": "http://localhost/mail.sshukla.de",
"domain": "mail.sshukla.de"
}
]
response_dict = json.loads(str(response.data, 'utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response_dict, expected_response)
|
Add unit test for testing flask endpoint
|
Add unit test for testing flask endpoint
Test GET on the `/` endpoint
|
Python
|
mit
|
thunderboltsid/stampman
|
import unittest
- from stampman.services import pool
+ import json
+
+ import requests
+ from stampman import main
- class TestAPIEndpoint(unittest.TestCase):
+ class TestAPIRoot(unittest.TestCase):
- pass
+ def setUp(self):
+ self._port = "8000"
+ self._path = "http://0.0.0.0"
+ main.app.config['TESTING'] = True
+ self._app = main.app.test_client()
+ def testGetJson(self):
+ response = self._app.get("/")
+ expected_response = [
+ {
+ "services": [
+ {
+ "name": "mailgun",
+ "priority": 2
+ },
+ {
+ "name": "sendgrid",
+ "priority": 1
+ }
+ ],
+ "url": "http://localhost/mail.sshukla.de",
+ "domain": "mail.sshukla.de"
+ }
+ ]
+ response_dict = json.loads(str(response.data, 'utf-8'))
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.content_type, "application/json")
+ self.assertEqual(response_dict, expected_response)
+
|
Add unit test for testing flask endpoint
|
## Code Before:
import unittest
from stampman.services import pool
class TestAPIEndpoint(unittest.TestCase):
pass
## Instruction:
Add unit test for testing flask endpoint
## Code After:
import unittest
import json
import requests
from stampman import main
class TestAPIRoot(unittest.TestCase):
def setUp(self):
self._port = "8000"
self._path = "http://0.0.0.0"
main.app.config['TESTING'] = True
self._app = main.app.test_client()
def testGetJson(self):
response = self._app.get("/")
expected_response = [
{
"services": [
{
"name": "mailgun",
"priority": 2
},
{
"name": "sendgrid",
"priority": 1
}
],
"url": "http://localhost/mail.sshukla.de",
"domain": "mail.sshukla.de"
}
]
response_dict = json.loads(str(response.data, 'utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response_dict, expected_response)
|
import unittest
- from stampman.services import pool
+ import json
+
+ import requests
+ from stampman import main
- class TestAPIEndpoint(unittest.TestCase):
? ^^^^ ^^
+ class TestAPIRoot(unittest.TestCase):
? ^ ^
- pass
+ def setUp(self):
+ self._port = "8000"
+ self._path = "http://0.0.0.0"
+ main.app.config['TESTING'] = True
+ self._app = main.app.test_client()
+
+ def testGetJson(self):
+ response = self._app.get("/")
+ expected_response = [
+ {
+ "services": [
+ {
+ "name": "mailgun",
+ "priority": 2
+ },
+ {
+ "name": "sendgrid",
+ "priority": 1
+ }
+ ],
+ "url": "http://localhost/mail.sshukla.de",
+ "domain": "mail.sshukla.de"
+ }
+ ]
+ response_dict = json.loads(str(response.data, 'utf-8'))
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.content_type, "application/json")
+ self.assertEqual(response_dict, expected_response)
|
b3838e3ad3c8c9575c18ae9770504405fd3f5006
|
saltcloud/mapper.py
|
saltcloud/mapper.py
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
Add parallel capability for running the map
|
Add parallel capability for running the map
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
+ import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
+ if self.opts['parallel']:
+ multiprocessing.Process(
+ target=self.cloud.create(vm_)
+ ).start()
+ else:
- self.cloud.create(vm_)
+ self.cloud.create(vm_)
|
Add parallel capability for running the map
|
## Code Before:
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
## Instruction:
Add parallel capability for running the map
## Code After:
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
+ import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
+ if self.opts['parallel']:
+ multiprocessing.Process(
+ target=self.cloud.create(vm_)
+ ).start()
+ else:
- self.cloud.create(vm_)
+ self.cloud.create(vm_)
? ++++
|
c96a2f636b48b065e8404af6d67fbae5986fd34a
|
tests/basics/subclass_native2_tuple.py
|
tests/basics/subclass_native2_tuple.py
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
Expand test cases for equality of subclasses.
|
tests/basics: Expand test cases for equality of subclasses.
|
Python
|
mit
|
pramasoul/micropython,adafruit/circuitpython,henriknelson/micropython,MrSurly/micropython,bvernoux/micropython,tobbad/micropython,kerneltask/micropython,kerneltask/micropython,tobbad/micropython,tobbad/micropython,pramasoul/micropython,selste/micropython,adafruit/circuitpython,henriknelson/micropython,pozetroninc/micropython,pozetroninc/micropython,MrSurly/micropython,pozetroninc/micropython,adafruit/circuitpython,kerneltask/micropython,tobbad/micropython,pramasoul/micropython,selste/micropython,bvernoux/micropython,MrSurly/micropython,adafruit/circuitpython,bvernoux/micropython,selste/micropython,henriknelson/micropython,pozetroninc/micropython,kerneltask/micropython,henriknelson/micropython,tobbad/micropython,kerneltask/micropython,pozetroninc/micropython,selste/micropython,pramasoul/micropython,MrSurly/micropython,pramasoul/micropython,MrSurly/micropython,henriknelson/micropython,bvernoux/micropython,adafruit/circuitpython,selste/micropython,bvernoux/micropython,adafruit/circuitpython
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
+ a = tuple([1,2,3])
+ b = Ctuple1([1,2,3])
+ c = Ctuple2([1,2,3])
+
+ print(a == b)
+ print(b == c)
+ print(c == a)
+
|
Expand test cases for equality of subclasses.
|
## Code Before:
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
## Instruction:
Expand test cases for equality of subclasses.
## Code After:
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
+
+ a = tuple([1,2,3])
+ b = Ctuple1([1,2,3])
+ c = Ctuple2([1,2,3])
+
+ print(a == b)
+ print(b == c)
+ print(c == a)
|
c3afc6c28530c3dfc3bd57d9a1841a60bf92ba4f
|
tools/perf/benchmarks/netsim_top25.py
|
tools/perf/benchmarks/netsim_top25.py
|
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
test.clear_cache_before_each_run = True
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
|
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
def __init__(self):
super(NetsimTop25, self).__init__()
self.test.clear_cache_before_each_run = True
|
Fix bug which caused page cyclers to always clear cache before load.
|
[Telemetry] Fix bug which caused page cyclers to always clear cache before load.
Previously, the cache clearing bit would apply when the netsim benchmark was
imported. This fixes it so that it only applies when it is used.
BUG=256492
NOTRY=True
[email protected]
Review URL: https://codereview.chromium.org/18550003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@209708 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,littlstar/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Chilledheart/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,ltilve/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,M4sse/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,patrickm/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,axinging/chromiu
m-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,Just-D/chromium-1,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,chuan9/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,patrickm/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,Just-D/chromium-1,Jonekee/chromium.src,bright
-sparks/chromium-spacewalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,littlstar/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,Chilledheart/chromium,littlstar/chromium.src,dednal/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,jaruba/chromium.src,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,markYoungH/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,jaruba/chromium.src,dednal/chromium.src,dushu1203/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,patrickm/
chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,anirudhSK/chromium,dushu1203/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,anirudhSK/chromium,dednal/chromium.src,Just-D/chromium-1,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl
|
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
- test.clear_cache_before_each_run = True
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
+ def __init__(self):
+ super(NetsimTop25, self).__init__()
+ self.test.clear_cache_before_each_run = True
+
|
Fix bug which caused page cyclers to always clear cache before load.
|
## Code Before:
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
test.clear_cache_before_each_run = True
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
## Instruction:
Fix bug which caused page cyclers to always clear cache before load.
## Code After:
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
def __init__(self):
super(NetsimTop25, self).__init__()
self.test.clear_cache_before_each_run = True
|
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
- test.clear_cache_before_each_run = True
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
+
+ def __init__(self):
+ super(NetsimTop25, self).__init__()
+ self.test.clear_cache_before_each_run = True
|
7bfa9d24f7af811746bbb0336b5e75a592cff186
|
aws_eis/lib/checks.py
|
aws_eis/lib/checks.py
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
Fix KeyError: 'version' due to 403 Forbidden error
|
Fix KeyError: 'version' due to 403 Forbidden error
|
Python
|
mit
|
jpdoria/aws_eis
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
- es_version = get_version(endpoint)
- if r.status_code == 200:
- print('ESVersion: {}'.format(es_version))
- print('Connection: OK')
+ try:
+ es_version = get_version(endpoint)
+ except KeyError:
- print('Status: {}\n'.format(r.status_code))
+ print('Status: {}'.format(r.status_code))
+ sys.exit(1)
else:
- print(json.loads(msg)['Message'])
+ if r.status_code == 200:
+ print('ESVersion: {}'.format(es_version))
+ print('Connection: OK')
- print('Status: {}'.format(status_code))
+ print('Status: {}\n'.format(r.status_code))
- sys.exit(1)
|
Fix KeyError: 'version' due to 403 Forbidden error
|
## Code Before:
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
## Instruction:
Fix KeyError: 'version' due to 403 Forbidden error
## Code After:
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
- es_version = get_version(endpoint)
- if r.status_code == 200:
- print('ESVersion: {}'.format(es_version))
- print('Connection: OK')
+ try:
+ es_version = get_version(endpoint)
+ except KeyError:
- print('Status: {}\n'.format(r.status_code))
? --
+ print('Status: {}'.format(r.status_code))
+ sys.exit(1)
else:
- print(json.loads(msg)['Message'])
+ if r.status_code == 200:
+ print('ESVersion: {}'.format(es_version))
+ print('Connection: OK')
- print('Status: {}'.format(status_code))
+ print('Status: {}\n'.format(r.status_code))
? ++++ ++ ++
- sys.exit(1)
|
fe974197217eff350f1dc0bc5687c83066d6dd34
|
kaggle_tools/features_engineering/dates_engineering.py
|
kaggle_tools/features_engineering/dates_engineering.py
|
import pandas as pd
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
|
import pandas as pd
import pytz
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
def localize_datetime(input_df, timezone='Europe/Paris',
datetime_column='tms_gmt'):
"""
Convert datetime column from UTC to another timezone.
"""
tmz = pytz.timezone(timezone)
df = input_df.copy()
return (df.set_index(datetime_column)
.tz_localize(pytz.utc) # UTC time
.tz_convert(tmz)) # Timezone time
|
Add a datetime localization function
|
Add a datetime localization function
|
Python
|
mit
|
yassineAlouini/kaggle-tools,yassineAlouini/kaggle-tools
|
import pandas as pd
+ import pytz
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
+
+ def localize_datetime(input_df, timezone='Europe/Paris',
+ datetime_column='tms_gmt'):
+ """
+ Convert datetime column from UTC to another timezone.
+ """
+ tmz = pytz.timezone(timezone)
+ df = input_df.copy()
+ return (df.set_index(datetime_column)
+ .tz_localize(pytz.utc) # UTC time
+ .tz_convert(tmz)) # Timezone time
+
|
Add a datetime localization function
|
## Code Before:
import pandas as pd
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
## Instruction:
Add a datetime localization function
## Code After:
import pandas as pd
import pytz
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
def localize_datetime(input_df, timezone='Europe/Paris',
datetime_column='tms_gmt'):
"""
Convert datetime column from UTC to another timezone.
"""
tmz = pytz.timezone(timezone)
df = input_df.copy()
return (df.set_index(datetime_column)
.tz_localize(pytz.utc) # UTC time
.tz_convert(tmz)) # Timezone time
|
import pandas as pd
+ import pytz
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
+
+
+ def localize_datetime(input_df, timezone='Europe/Paris',
+ datetime_column='tms_gmt'):
+ """
+ Convert datetime column from UTC to another timezone.
+ """
+ tmz = pytz.timezone(timezone)
+ df = input_df.copy()
+ return (df.set_index(datetime_column)
+ .tz_localize(pytz.utc) # UTC time
+ .tz_convert(tmz)) # Timezone time
|
d610e03ef113d37d516bd9432bd3f43f3d443563
|
tests/test_commands.py
|
tests/test_commands.py
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
Add failing test for gh-5
|
Add failing test for gh-5
|
Python
|
bsd-3-clause
|
jupyter/testpath
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
+ def test_assert_calls_twice(self):
+ with assert_calls('git'):
+ call(['git'])
+
+ with self.assertRaises(AssertionError):
+ with assert_calls('git'):
+ pass
+
|
Add failing test for gh-5
|
## Code Before:
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
## Instruction:
Add failing test for gh-5
## Code After:
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
    """Tests for testpath's ``assert_calls()`` context manager.

    ``assert_calls()`` shadows a command on $PATH for the duration of the
    ``with`` block and raises AssertionError on exit if the command was
    not invoked (or not invoked with the expected arguments).
    """

    def test_assert_calls(self):
        # Remember $PATH so we can verify the context manager restores it.
        initial_path = os.environ['PATH']
        with assert_calls('foobar'):
            call(['foobar'])
        # No call made inside the block -> the exit check must fail.
        with self.assertRaises(AssertionError):
            with assert_calls('foo'):
                pass
        # The context manager should clean up $PATH again
        self.assertEqual(os.environ['PATH'], initial_path)

    def test_assert_calls_with_args(self):
        # Passes when the command is invoked with exactly these arguments.
        with assert_calls('foo', ['bar', 'baz']):
            call(['foo', 'bar', 'baz'])
        # Right command but wrong arguments -> still an AssertionError.
        with self.assertRaises(AssertionError):
            with assert_calls('cheese', ['crackers']):
                call(['cheese', 'biscuits'])
                call(['cheese', 'wine'])

    def test_assert_calls_twice(self):
        # Regression test (gh-5): a second use of assert_calls() for the
        # same command must still detect that no call was made.
        with assert_calls('git'):
            call(['git'])
        with self.assertRaises(AssertionError):
            with assert_calls('git'):
                pass
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
+
+ def test_assert_calls_twice(self):
+ with assert_calls('git'):
+ call(['git'])
+
+ with self.assertRaises(AssertionError):
+ with assert_calls('git'):
+ pass
|
6e6aa02907b3d156174cfe1a5f8e9c274c080778
|
SegNetCMR/helpers.py
|
SegNetCMR/helpers.py
|
import tensorflow as tf
def add_output_images(images, logits, labels):
cast_labels = tf.cast(labels, tf.uint8) * 128
cast_labels = cast_labels[...,None]
tf.summary.image('input_labels', cast_labels, max_outputs=3)
classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]
output_image_gb = images[...,0]
output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_mixed', output_image, max_outputs=3)
output_image_binary = tf.argmax(logits, 3)
output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
tf.summary.image('output_labels', output_image_binary, max_outputs=3)
return
|
import tensorflow as tf
def add_output_images(images, logits, labels):
cast_labels = tf.cast(labels, tf.uint8) * 128
cast_labels = cast_labels[...,None]
tf.summary.image('input_labels', cast_labels, max_outputs=3)
classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]
output_image_gb = images[...,0]
output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_mixed', output_image, max_outputs=3)
output_image_binary = tf.argmax(logits, 3)
output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
tf.summary.image('output_labels', output_image_binary, max_outputs=3)
output_labels_mixed_r = output_image_binary[...,0] + tf.multiply(images[...,0], (1-output_image_binary[...,0]))
output_labels_mixed = tf.stack([output_labels_mixed_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_labels_mixed', output_labels_mixed, max_outputs=3)
return
|
Add output with images mixed with binary version of output labels
|
Add output with images mixed with binary version of output labels
|
Python
|
mit
|
mshunshin/SegNetCMR,mshunshin/SegNetCMR
|
import tensorflow as tf
def add_output_images(images, logits, labels):
cast_labels = tf.cast(labels, tf.uint8) * 128
cast_labels = cast_labels[...,None]
tf.summary.image('input_labels', cast_labels, max_outputs=3)
classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]
output_image_gb = images[...,0]
output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_mixed', output_image, max_outputs=3)
output_image_binary = tf.argmax(logits, 3)
output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
tf.summary.image('output_labels', output_image_binary, max_outputs=3)
+ output_labels_mixed_r = output_image_binary[...,0] + tf.multiply(images[...,0], (1-output_image_binary[...,0]))
+ output_labels_mixed = tf.stack([output_labels_mixed_r, output_image_gb, output_image_gb], axis=3)
+ tf.summary.image('output_labels_mixed', output_labels_mixed, max_outputs=3)
+
return
|
Add output with images mixed with binary version of output labels
|
## Code Before:
import tensorflow as tf
def add_output_images(images, logits, labels):
cast_labels = tf.cast(labels, tf.uint8) * 128
cast_labels = cast_labels[...,None]
tf.summary.image('input_labels', cast_labels, max_outputs=3)
classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]
output_image_gb = images[...,0]
output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_mixed', output_image, max_outputs=3)
output_image_binary = tf.argmax(logits, 3)
output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
tf.summary.image('output_labels', output_image_binary, max_outputs=3)
return
## Instruction:
Add output with images mixed with binary version of output labels
## Code After:
import tensorflow as tf
def add_output_images(images, logits, labels):
    """Attach TensorBoard image summaries visualising segmentation output.

    Writes four summaries (up to 3 images each): the ground-truth labels,
    the input tinted red by the soft foreground probability, the hard
    (argmax) prediction, and the input tinted red by that hard prediction.

    Args:
        images: input batch; assumes shape (batch, H, W, C) with channel 0
            used as the grayscale base image -- TODO confirm.
        logits: per-pixel class scores; rank 4 (argmax over axis 3), with
            channel 1 treated as "foreground".
        labels: per-pixel integer labels; presumably values in {0, 1},
            since they are scaled by 128 for display -- verify upstream.
    """
    # Scale labels to {0,128} so they are visible as uint8 pixels, then
    # append a trailing channel axis as required by tf.summary.image.
    cast_labels = tf.cast(labels, tf.uint8) * 128
    cast_labels = cast_labels[...,None]
    tf.summary.image('input_labels', cast_labels, max_outputs=3)

    # Softmax over the last axis; keep channel 1 = foreground probability.
    # NOTE(review): `dim` is the pre-TF-1.5 spelling of `axis` -- confirm
    # the pinned TensorFlow version before modernising this call.
    classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]

    # Blend: red channel saturates where the foreground probability is
    # high; green/blue channels keep the original grayscale image.
    output_image_gb = images[...,0]
    output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
    output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
    tf.summary.image('output_mixed', output_image, max_outputs=3)

    # Hard prediction: argmax over classes, rescaled to ~0.5 gray (128/255).
    output_image_binary = tf.argmax(logits, 3)
    output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
    tf.summary.image('output_labels', output_image_binary, max_outputs=3)

    # Same red-tint blend as above, driven by the binary prediction
    # instead of the soft probability.
    output_labels_mixed_r = output_image_binary[...,0] + tf.multiply(images[...,0], (1-output_image_binary[...,0]))
    output_labels_mixed = tf.stack([output_labels_mixed_r, output_image_gb, output_image_gb], axis=3)
    tf.summary.image('output_labels_mixed', output_labels_mixed, max_outputs=3)

    return
|
import tensorflow as tf
def add_output_images(images, logits, labels):
cast_labels = tf.cast(labels, tf.uint8) * 128
cast_labels = cast_labels[...,None]
tf.summary.image('input_labels', cast_labels, max_outputs=3)
classification1 = tf.nn.softmax(logits = logits, dim=-1)[...,1]
output_image_gb = images[...,0]
output_image_r = classification1 + tf.multiply(images[...,0], (1-classification1))
output_image = tf.stack([output_image_r, output_image_gb, output_image_gb], axis=3)
tf.summary.image('output_mixed', output_image, max_outputs=3)
output_image_binary = tf.argmax(logits, 3)
output_image_binary = tf.cast(output_image_binary[...,None], tf.float32) * 128/255
tf.summary.image('output_labels', output_image_binary, max_outputs=3)
+ output_labels_mixed_r = output_image_binary[...,0] + tf.multiply(images[...,0], (1-output_image_binary[...,0]))
+ output_labels_mixed = tf.stack([output_labels_mixed_r, output_image_gb, output_image_gb], axis=3)
+ tf.summary.image('output_labels_mixed', output_labels_mixed, max_outputs=3)
+
return
|
3ef1531f6934055a416cdddc694f6ca75694d649
|
voltron/common.py
|
voltron/common.py
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
Make use of expanduser() more sane
|
Make use of expanduser() more sane
|
Python
|
mit
|
snare/voltron,snare/voltron,snare/voltron,snare/voltron
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
- VOLTRON_DIR = '~/.voltron/'
+ VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
Make use of expanduser() more sane
|
## Code Before:
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
## Instruction:
Make use of expanduser() more sane
## Code After:
import logging
import logging.config
import os  # required by os.path.expanduser below (was missing -> NameError on import)

# dictConfig schema for voltron's single 'voltron' logger: one stream
# handler with a "voltron: [LEVEL] message" prefix.
LOG_CONFIG = {
    'version': 1,
    'formatters': {
        'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
    },
    'handlers': {
        'default': {
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        }
    },
    'loggers': {
        'voltron': {
            'handlers': ['default'],
            'level': 'INFO',
            # Fixed key spelling ('propogate' -> 'propagate') so dictConfig
            # actually applies the intended setting instead of ignoring it.
            'propagate': True,
        }
    }
}

# Per-user voltron directory and its config file path.
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'

def configure_logging():
    """Install LOG_CONFIG and return the configured 'voltron' logger."""
    logging.config.dictConfig(LOG_CONFIG)
    log = logging.getLogger('voltron')
    return log
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
- VOLTRON_DIR = '~/.voltron/'
+ VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
cd5802850fab20648748b4be4a47ad4cc050c32d
|
tests/test_fields/common_tests.py
|
tests/test_fields/common_tests.py
|
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
def get_instance(self):
return self.Field()
def test_submitting_empty_value_on_required_field_returns_error(self):
field = self.get_instance()
field.required = True
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
field = self.get_instance()
field.required = False
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_is_required_by_default(self):
field = self.get_instance()
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_field_with_default_is_not_required(self):
field = self.get_instance()
field.default = self.valid_default
value, error = field.submit(field.empty_value())
self.assertTrue(value)
self.assertFalse(error)
def test_field_instance_can_overwrite_specific_messages(self):
field = self.get_instance()
field.messages = {'required': 'Lorem ipsum'}
value, error = field.submit(None)
self.assertIn('Lorem ipsum', str(error))
|
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
def get_instance(self):
return self.Field()
def test_submitting_empty_value_on_required_field_returns_error(self):
field = self.get_instance()
field.required = True
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
field = self.get_instance()
field.required = False
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_is_required_by_default(self):
field = self.get_instance()
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_field_with_default_is_not_required(self):
field = self.get_instance()
field.default = self.valid_default
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_instance_can_overwrite_specific_messages(self):
field = self.get_instance()
field.messages = {'required': 'Lorem ipsum'}
value, error = field.submit(None)
self.assertIn('Lorem ipsum', str(error))
|
Fix detail in one of the common tests
|
Fix detail in one of the common tests
The value returned by the submit will not necessarily be truish.
|
Python
|
mit
|
hugollm/lie2me,hugollm/lie2me
|
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
def get_instance(self):
return self.Field()
def test_submitting_empty_value_on_required_field_returns_error(self):
field = self.get_instance()
field.required = True
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
field = self.get_instance()
field.required = False
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_is_required_by_default(self):
field = self.get_instance()
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_field_with_default_is_not_required(self):
field = self.get_instance()
field.default = self.valid_default
value, error = field.submit(field.empty_value())
- self.assertTrue(value)
self.assertFalse(error)
def test_field_instance_can_overwrite_specific_messages(self):
field = self.get_instance()
field.messages = {'required': 'Lorem ipsum'}
value, error = field.submit(None)
self.assertIn('Lorem ipsum', str(error))
|
Fix detail in one of the common tests
|
## Code Before:
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
def get_instance(self):
return self.Field()
def test_submitting_empty_value_on_required_field_returns_error(self):
field = self.get_instance()
field.required = True
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
field = self.get_instance()
field.required = False
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_is_required_by_default(self):
field = self.get_instance()
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_field_with_default_is_not_required(self):
field = self.get_instance()
field.default = self.valid_default
value, error = field.submit(field.empty_value())
self.assertTrue(value)
self.assertFalse(error)
def test_field_instance_can_overwrite_specific_messages(self):
field = self.get_instance()
field.messages = {'required': 'Lorem ipsum'}
value, error = field.submit(None)
self.assertIn('Lorem ipsum', str(error))
## Instruction:
Fix detail in one of the common tests
## Code After:
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
    """Shared test cases mixed into each concrete Field test class.

    Subclasses provide ``self.Field`` (the field class under test) and
    ``self.valid_default`` (a value acceptable as that field's default).
    """

    def get_instance(self):
        return self.Field()

    def _empty_submit_error(self, **attrs):
        # Build a field, apply the given attributes, submit its empty
        # value and return only the resulting error (if any).
        field = self.get_instance()
        for name, value in attrs.items():
            setattr(field, name, value)
        _, error = field.submit(field.empty_value())
        return error

    def test_submitting_empty_value_on_required_field_returns_error(self):
        self.assertTrue(self._empty_submit_error(required=True))

    def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
        self.assertFalse(self._empty_submit_error(required=False))

    def test_field_is_required_by_default(self):
        self.assertTrue(self._empty_submit_error())

    def test_field_with_default_is_not_required(self):
        self.assertFalse(self._empty_submit_error(default=self.valid_default))

    def test_field_instance_can_overwrite_specific_messages(self):
        field = self.get_instance()
        field.messages = {'required': 'Lorem ipsum'}
        _, error = field.submit(None)
        self.assertIn('Lorem ipsum', str(error))
|
from unittest import TestCase
from lie2me import Field
class CommonTests(object):
def get_instance(self):
return self.Field()
def test_submitting_empty_value_on_required_field_returns_error(self):
field = self.get_instance()
field.required = True
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_submitting_empty_value_on_optional_field_does_not_return_error(self):
field = self.get_instance()
field.required = False
value, error = field.submit(field.empty_value())
self.assertFalse(error)
def test_field_is_required_by_default(self):
field = self.get_instance()
value, error = field.submit(field.empty_value())
self.assertTrue(error)
def test_field_with_default_is_not_required(self):
field = self.get_instance()
field.default = self.valid_default
value, error = field.submit(field.empty_value())
- self.assertTrue(value)
self.assertFalse(error)
def test_field_instance_can_overwrite_specific_messages(self):
field = self.get_instance()
field.messages = {'required': 'Lorem ipsum'}
value, error = field.submit(None)
self.assertIn('Lorem ipsum', str(error))
|
efabe61cec636d5104a639b8d5cfef23eb840dd7
|
apps/live/urls.py
|
apps/live/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
Remove the missing view references.
|
Remove the missing view references.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
+ from .views import StatusView
- from .views import (AwayView, DiscussionView, EpilogueView, GameView,
- NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
Remove the missing view references.
|
## Code Before:
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
## Instruction:
Remove the missing view references.
## Code After:
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
# URL routes for the live app (old-style Django patterns()).
urlpatterns = patterns('',
    # Because sooner or later, avalonstar.tv/ will be a welcome page.
    url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),

    # Status (for bots).
    url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
+ from .views import StatusView
- from .views import (AwayView, DiscussionView, EpilogueView, GameView,
- NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
88a5a74ee1e3d3f3fe9e6a43bacd73b2f3f5bb96
|
tests/test_mongo.py
|
tests/test_mongo.py
|
import unittest
import logging
logging.basicConfig()
logger = logging.getLogger()
from checks.db.mongo import MongoDb
class TestMongo(unittest.TestCase):
def setUp(self):
self.c = MongoDb(logger)
def testCheck(self):
r = self.c.check({"MongoDBServer": "blah"})
self.assertEquals(r["connections"]["current"], 1)
self.assertEquals("opcounters" in r, False)
r = self.c.check({"MongoDBServer": "blah"})
self.assertEquals(r["connections"]["current"], 1)
self.assertEquals(r["asserts"]["regularPS"], 0)
self.assertEquals(r["asserts"]["userPS"], 0)
self.assertEquals(r["opcounters"]["commandPS"], (244 - 18) / (10191 - 2893))
if __name__ == '__main__':
unittest.main()
|
import unittest
import logging
logging.basicConfig()
import subprocess
from tempfile import mkdtemp
from checks.db.mongo import MongoDb
PORT1 = 27017
PORT2 = 37017
class TestMongo(unittest.TestCase):
def setUp(self):
self.c = MongoDb(logging.getLogger())
# Start 1 instances of Mongo
dir1 = mkdtemp()
self.p1 = subprocess.Popen(["mongod", "--dbpath", dir1, "--port", str(PORT1)],
executable="mongod",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
def tearDown(self):
if self.p1 is not None:
self.p1.terminate()
def testCheck(self):
if self.p1 is not None:
r = self.c.check({"MongoDBServer": "localhost", "mongodb_port": PORT1})
self.assertEquals(r and r["connections"]["current"] == 1, True)
assert r["connections"]["available"] >= 1
assert r["uptime"] >= 0, r
assert r["mem"]["resident"] > 0
assert r["mem"]["virtual"] > 0
if __name__ == '__main__':
unittest.main()
|
Test does start a mongo instance.
|
Test does start a mongo instance.
|
Python
|
bsd-3-clause
|
jshum/dd-agent,mderomph-coolblue/dd-agent,AniruddhaSAtre/dd-agent,remh/dd-agent,lookout/dd-agent,PagerDuty/dd-agent,Mashape/dd-agent,indeedops/dd-agent,GabrielNicolasAvellaneda/dd-agent,AntoCard/powerdns-recursor_check,citrusleaf/dd-agent,benmccann/dd-agent,gphat/dd-agent,mderomph-coolblue/dd-agent,zendesk/dd-agent,huhongbo/dd-agent,a20012251/dd-agent,joelvanvelden/dd-agent,huhongbo/dd-agent,jshum/dd-agent,truthbk/dd-agent,AniruddhaSAtre/dd-agent,brettlangdon/dd-agent,jraede/dd-agent,jvassev/dd-agent,zendesk/dd-agent,jshum/dd-agent,darron/dd-agent,Mashape/dd-agent,polynomial/dd-agent,ess/dd-agent,amalakar/dd-agent,mderomph-coolblue/dd-agent,jyogi/purvar-agent,huhongbo/dd-agent,GabrielNicolasAvellaneda/dd-agent,urosgruber/dd-agent,gphat/dd-agent,cberry777/dd-agent,Mashape/dd-agent,urosgruber/dd-agent,citrusleaf/dd-agent,pfmooney/dd-agent,pmav99/praktoras,amalakar/dd-agent,a20012251/dd-agent,manolama/dd-agent,cberry777/dd-agent,JohnLZeller/dd-agent,yuecong/dd-agent,citrusleaf/dd-agent,packetloop/dd-agent,brettlangdon/dd-agent,Shopify/dd-agent,eeroniemi/dd-agent,pmav99/praktoras,darron/dd-agent,PagerDuty/dd-agent,c960657/dd-agent,JohnLZeller/dd-agent,jraede/dd-agent,benmccann/dd-agent,AniruddhaSAtre/dd-agent,pfmooney/dd-agent,ess/dd-agent,takus/dd-agent,PagerDuty/dd-agent,polynomial/dd-agent,tebriel/dd-agent,takus/dd-agent,oneandoneis2/dd-agent,AntoCard/powerdns-recursor_check,zendesk/dd-agent,joelvanvelden/dd-agent,jamesandariese/dd-agent,tebriel/dd-agent,oneandoneis2/dd-agent,guruxu/dd-agent,jraede/dd-agent,yuecong/dd-agent,oneandoneis2/dd-agent,PagerDuty/dd-agent,pmav99/praktoras,lookout/dd-agent,relateiq/dd-agent,jamesandariese/dd-agent,Shopify/dd-agent,truthbk/dd-agent,manolama/dd-agent,eeroniemi/dd-agent,indeedops/dd-agent,gphat/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,jraede/dd-agent,indeedops/dd-agent,a20012251/dd-agent,huhongbo/dd-agent,mderomph-coolblue/dd-agent,Wattpad/dd-agent,remh/dd-agent,Shopify/dd-agent,takus/dd-agent,joelvanvelden/dd-agent,Mashape
/dd-agent,pmav99/praktoras,relateiq/dd-agent,amalakar/dd-agent,ess/dd-agent,truthbk/dd-agent,relateiq/dd-agent,jshum/dd-agent,lookout/dd-agent,brettlangdon/dd-agent,jvassev/dd-agent,darron/dd-agent,manolama/dd-agent,eeroniemi/dd-agent,yuecong/dd-agent,ess/dd-agent,c960657/dd-agent,AntoCard/powerdns-recursor_check,zendesk/dd-agent,urosgruber/dd-agent,tebriel/dd-agent,jamesandariese/dd-agent,truthbk/dd-agent,jshum/dd-agent,relateiq/dd-agent,benmccann/dd-agent,guruxu/dd-agent,jvassev/dd-agent,pfmooney/dd-agent,packetloop/dd-agent,ess/dd-agent,amalakar/dd-agent,yuecong/dd-agent,guruxu/dd-agent,a20012251/dd-agent,polynomial/dd-agent,oneandoneis2/dd-agent,gphat/dd-agent,indeedops/dd-agent,Shopify/dd-agent,zendesk/dd-agent,AniruddhaSAtre/dd-agent,darron/dd-agent,citrusleaf/dd-agent,oneandoneis2/dd-agent,tebriel/dd-agent,packetloop/dd-agent,a20012251/dd-agent,Wattpad/dd-agent,jyogi/purvar-agent,jamesandariese/dd-agent,jamesandariese/dd-agent,JohnLZeller/dd-agent,relateiq/dd-agent,pfmooney/dd-agent,indeedops/dd-agent,jvassev/dd-agent,PagerDuty/dd-agent,brettlangdon/dd-agent,darron/dd-agent,Wattpad/dd-agent,remh/dd-agent,eeroniemi/dd-agent,c960657/dd-agent,GabrielNicolasAvellaneda/dd-agent,gphat/dd-agent,tebriel/dd-agent,guruxu/dd-agent,brettlangdon/dd-agent,benmccann/dd-agent,takus/dd-agent,remh/dd-agent,Mashape/dd-agent,manolama/dd-agent,JohnLZeller/dd-agent,JohnLZeller/dd-agent,takus/dd-agent,truthbk/dd-agent,pfmooney/dd-agent,polynomial/dd-agent,citrusleaf/dd-agent,yuecong/dd-agent,cberry777/dd-agent,c960657/dd-agent,urosgruber/dd-agent,manolama/dd-agent,AntoCard/powerdns-recursor_check,jyogi/purvar-agent,Wattpad/dd-agent,GabrielNicolasAvellaneda/dd-agent,remh/dd-agent,jyogi/purvar-agent,pmav99/praktoras,jyogi/purvar-agent,cberry777/dd-agent,mderomph-coolblue/dd-agent,lookout/dd-agent,benmccann/dd-agent,polynomial/dd-agent,amalakar/dd-agent,huhongbo/dd-agent,joelvanvelden/dd-agent,packetloop/dd-agent,packetloop/dd-agent,GabrielNicolasAvellaneda/dd-agent,AntoCard/powerdns-
recursor_check,guruxu/dd-agent,AniruddhaSAtre/dd-agent,c960657/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,joelvanvelden/dd-agent,Wattpad/dd-agent,jraede/dd-agent,lookout/dd-agent,Shopify/dd-agent
|
import unittest
import logging
logging.basicConfig()
- logger = logging.getLogger()
+ import subprocess
+ from tempfile import mkdtemp
from checks.db.mongo import MongoDb
+ PORT1 = 27017
+ PORT2 = 37017
+
class TestMongo(unittest.TestCase):
def setUp(self):
- self.c = MongoDb(logger)
+ self.c = MongoDb(logging.getLogger())
+ # Start 1 instances of Mongo
+ dir1 = mkdtemp()
+ self.p1 = subprocess.Popen(["mongod", "--dbpath", dir1, "--port", str(PORT1)],
+ executable="mongod",
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
+ def tearDown(self):
+ if self.p1 is not None:
+ self.p1.terminate()
def testCheck(self):
- r = self.c.check({"MongoDBServer": "blah"})
+ if self.p1 is not None:
+ r = self.c.check({"MongoDBServer": "localhost", "mongodb_port": PORT1})
- self.assertEquals(r["connections"]["current"], 1)
+ self.assertEquals(r and r["connections"]["current"] == 1, True)
+ assert r["connections"]["available"] >= 1
+ assert r["uptime"] >= 0, r
+ assert r["mem"]["resident"] > 0
+ assert r["mem"]["virtual"] > 0
- self.assertEquals("opcounters" in r, False)
-
- r = self.c.check({"MongoDBServer": "blah"})
- self.assertEquals(r["connections"]["current"], 1)
- self.assertEquals(r["asserts"]["regularPS"], 0)
- self.assertEquals(r["asserts"]["userPS"], 0)
- self.assertEquals(r["opcounters"]["commandPS"], (244 - 18) / (10191 - 2893))
-
if __name__ == '__main__':
unittest.main()
|
Test does start a mongo instance.
|
## Code Before:
import unittest
import logging
logging.basicConfig()
logger = logging.getLogger()
from checks.db.mongo import MongoDb
class TestMongo(unittest.TestCase):
def setUp(self):
self.c = MongoDb(logger)
def testCheck(self):
r = self.c.check({"MongoDBServer": "blah"})
self.assertEquals(r["connections"]["current"], 1)
self.assertEquals("opcounters" in r, False)
r = self.c.check({"MongoDBServer": "blah"})
self.assertEquals(r["connections"]["current"], 1)
self.assertEquals(r["asserts"]["regularPS"], 0)
self.assertEquals(r["asserts"]["userPS"], 0)
self.assertEquals(r["opcounters"]["commandPS"], (244 - 18) / (10191 - 2893))
if __name__ == '__main__':
unittest.main()
## Instruction:
Test does start a mongo instance.
## Code After:
import unittest
import logging
logging.basicConfig()
import subprocess
from tempfile import mkdtemp
from checks.db.mongo import MongoDb
PORT1 = 27017
PORT2 = 37017
class TestMongo(unittest.TestCase):
    """Integration test: boots a throwaway mongod and runs the MongoDb check.

    If the mongod binary is not available, self.p1 stays None and the
    check test degrades to a no-op instead of erroring in setUp/tearDown.
    """

    def setUp(self):
        self.c = MongoDb(logging.getLogger())
        # Start 1 instance of Mongo in a fresh temporary dbpath.
        # Fix: the original never set self.p1 = None, so the
        # `if self.p1 is not None` guards below could raise
        # AttributeError whenever Popen failed (mongod missing).
        self.p1 = None
        dir1 = mkdtemp()
        try:
            self.p1 = subprocess.Popen(["mongod", "--dbpath", dir1, "--port", str(PORT1)],
                                       executable="mongod",
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
        except OSError:
            # mongod not installed or not executable; leave p1 as None.
            self.p1 = None

    def tearDown(self):
        if self.p1 is not None:
            self.p1.terminate()

    def testCheck(self):
        if self.p1 is not None:
            r = self.c.check({"MongoDBServer": "localhost", "mongodb_port": PORT1})
            # Exactly one client (ours) should be connected.
            self.assertEquals(r and r["connections"]["current"] == 1, True)
            assert r["connections"]["available"] >= 1
            assert r["uptime"] >= 0, r
            assert r["mem"]["resident"] > 0
            assert r["mem"]["virtual"] > 0
if __name__ == '__main__':
unittest.main()
|
import unittest
import logging
logging.basicConfig()
- logger = logging.getLogger()
+ import subprocess
+ from tempfile import mkdtemp
from checks.db.mongo import MongoDb
+ PORT1 = 27017
+ PORT2 = 37017
+
class TestMongo(unittest.TestCase):
def setUp(self):
- self.c = MongoDb(logger)
+ self.c = MongoDb(logging.getLogger())
? +++++++++++ + +
+ # Start 1 instances of Mongo
+ dir1 = mkdtemp()
+ self.p1 = subprocess.Popen(["mongod", "--dbpath", dir1, "--port", str(PORT1)],
+ executable="mongod",
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
+ def tearDown(self):
+ if self.p1 is not None:
+ self.p1.terminate()
def testCheck(self):
- r = self.c.check({"MongoDBServer": "blah"})
+ if self.p1 is not None:
+ r = self.c.check({"MongoDBServer": "localhost", "mongodb_port": PORT1})
- self.assertEquals(r["connections"]["current"], 1)
? ^
+ self.assertEquals(r and r["connections"]["current"] == 1, True)
? ++++ ++++++ +++++ ^^^^
+ assert r["connections"]["available"] >= 1
+ assert r["uptime"] >= 0, r
+ assert r["mem"]["resident"] > 0
+ assert r["mem"]["virtual"] > 0
- self.assertEquals("opcounters" in r, False)
-
- r = self.c.check({"MongoDBServer": "blah"})
- self.assertEquals(r["connections"]["current"], 1)
- self.assertEquals(r["asserts"]["regularPS"], 0)
- self.assertEquals(r["asserts"]["userPS"], 0)
- self.assertEquals(r["opcounters"]["commandPS"], (244 - 18) / (10191 - 2893))
-
if __name__ == '__main__':
unittest.main()
|
e49fb537143cd0936b62ef53e294717d6ca4dc6f
|
tests/test_automaton.py
|
tests/test_automaton.py
|
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
|
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
Refactor abstract method test to reduce duplication
|
Refactor abstract method test to reduce duplication
|
Python
|
mit
|
caleb531/automata
|
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
+ abstract_methods = {
+ '__init__': (Automaton,),
+ '_init_from_formal_params': (Automaton,),
+ 'validate_self': (Automaton,),
+ '_validate_input_yield': (Automaton, '')
+ }
+ for method_name, method_args in abstract_methods.items():
- with nose.assert_raises(NotImplementedError):
+ with nose.assert_raises(NotImplementedError):
+ getattr(Automaton, method_name)(*method_args)
- Automaton.__init__(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton._init_from_formal_params(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton.validate_self(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton._validate_input_yield(Automaton, None)
|
Refactor abstract method test to reduce duplication
|
## Code Before:
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
## Instruction:
Refactor abstract method test to reduce duplication
## Code After:
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
+ abstract_methods = {
+ '__init__': (Automaton,),
+ '_init_from_formal_params': (Automaton,),
+ 'validate_self': (Automaton,),
+ '_validate_input_yield': (Automaton, '')
+ }
+ for method_name, method_args in abstract_methods.items():
- with nose.assert_raises(NotImplementedError):
+ with nose.assert_raises(NotImplementedError):
? ++++
+ getattr(Automaton, method_name)(*method_args)
- Automaton.__init__(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton._init_from_formal_params(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton.validate_self(Automaton)
- with nose.assert_raises(NotImplementedError):
- Automaton._validate_input_yield(Automaton, None)
|
d012763c57450555d45385ed9b254f500388618e
|
automata/render.py
|
automata/render.py
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
self.frames.append((plt.imshow(universe, cmap=self.colors),))
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.colors
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
self.normalize = matplotlib.colors.Normalize()
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
self.frames.append((plt.imshow(universe, norm=self.normalize, cmap=self.colors),))
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
|
Use the same normlization for whole gif
|
Use the same normlization for whole gif
|
Python
|
apache-2.0
|
stevearm/automata
|
import matplotlib
matplotlib.use('Agg')
+ import matplotlib.colors
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
+ self.normalize = matplotlib.colors.Normalize()
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
- self.frames.append((plt.imshow(universe, cmap=self.colors),))
+ self.frames.append((plt.imshow(universe, norm=self.normalize, cmap=self.colors),))
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
|
Use the same normlization for whole gif
|
## Code Before:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
self.frames.append((plt.imshow(universe, cmap=self.colors),))
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
## Instruction:
Use the same normlization for whole gif
## Code After:
import matplotlib
matplotlib.use('Agg')
import matplotlib.colors
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
self.normalize = matplotlib.colors.Normalize()
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
self.frames.append((plt.imshow(universe, norm=self.normalize, cmap=self.colors),))
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
|
import matplotlib
matplotlib.use('Agg')
+ import matplotlib.colors
import matplotlib.pyplot as plt
import matplotlib.animation as animation
class AnimatedGif:
""" Setup various rendering things
"""
def __init__(self, dpi=100, colors="Purples"):
self.frames = []
self.fig = plt.figure(dpi=dpi)
plt.axis("off")
self.colors = colors
+ self.normalize = matplotlib.colors.Normalize()
self.dimensions = None
def append(self, universe):
if not self.dimensions:
if len(universe.shape) != 2 and not (len(universe.shape) == 3 and universe.shape[2] in [3, 4]):
raise ValueError("Only handles 2D arrays of numbers, or 2D arrays of RGB(A) values")
self.dimensions = universe.shape
if self.dimensions != universe.shape:
raise ValueError("Shape changed from {} to {}".format(self.dimensions, universe.shape))
- self.frames.append((plt.imshow(universe, cmap=self.colors),))
+ self.frames.append((plt.imshow(universe, norm=self.normalize, cmap=self.colors),))
? +++++++++++++++++++++
def render(self, filename, interval=300):
im_ani = animation.ArtistAnimation(
self.fig, self.frames, interval=interval, repeat_delay=3000, blit=True
)
im_ani.save(filename, writer="imagemagick")
|
86f7badc8913783eb559a61569fc2b80ceedf744
|
src/nfc/archive/dummy_archive.py
|
src/nfc/archive/dummy_archive.py
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
Add open and close methods to dummy archive.
|
Add open and close methods to dummy archive.
|
Python
|
mit
|
HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
+
+
+ def open(self, cache_db=False):
+ pass
+
+
+ def close(self):
+ pass
|
Add open and close methods to dummy archive.
|
## Code Before:
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
## Instruction:
Add open and close methods to dummy archive.
## Code After:
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
+
+
+ def open(self, cache_db=False):
+ pass
+
+
+ def close(self):
+ pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.