commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2fa8b2f4a63579633272b1cc8d972baf27c661f2
|
pmg/models/__init__.py
|
pmg/models/__init__.py
|
from .users import *
from .resources import *
from .emails import *
from .pages import *
from .soundcloud_track import *
|
from .users import *
from .resources import *
from .emails import *
from .pages import *
from .soundcloud_track import SoundcloudTrack
|
Fix error on admin user_report view
|
Fix error on admin user_report view
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
from .users import *
from .resources import *
from .emails import *
from .pages import *
- from .soundcloud_track import *
+ from .soundcloud_track import SoundcloudTrack
|
Fix error on admin user_report view
|
## Code Before:
from .users import *
from .resources import *
from .emails import *
from .pages import *
from .soundcloud_track import *
## Instruction:
Fix error on admin user_report view
## Code After:
from .users import *
from .resources import *
from .emails import *
from .pages import *
from .soundcloud_track import SoundcloudTrack
|
// ... existing code ...
from .pages import *
from .soundcloud_track import SoundcloudTrack
// ... rest of the code ...
|
6618b12cef2759174148d1c7f69cbb91b8ea4482
|
mygpo/podcasts/migrations/0015_auto_20140616_2126.py
|
mygpo/podcasts/migrations/0015_auto_20140616_2126.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
def set_scope(apps, schema_editor):
URL = apps.get_model('podcasts', 'URL')
Slug = apps.get_model('podcasts', 'Slug')
URL.objects.filter(scope__isnull=True).update(scope='')
Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.RunPython(set_scope),
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
]
|
Fix data migration when making scope non-null
|
[DB] Fix data migration when making scope non-null
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
from __future__ import unicode_literals
from django.db import models, migrations
+
+
+ def set_scope(apps, schema_editor):
+ URL = apps.get_model('podcasts', 'URL')
+ Slug = apps.get_model('podcasts', 'Slug')
+
+ URL.objects.filter(scope__isnull=True).update(scope='')
+ Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
- field=models.CharField(db_index=True, max_length=32, blank=True),
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
- field=models.CharField(db_index=True, max_length=32, blank=True),
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
+ migrations.RunPython(set_scope),
+ migrations.AlterField(
+ model_name='slug',
+ name='scope',
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
+ ),
+ migrations.AlterField(
+ model_name='url',
+ name='scope',
+ field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
+ ),
+
]
|
Fix data migration when making scope non-null
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True),
),
]
## Instruction:
Fix data migration when making scope non-null
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
def set_scope(apps, schema_editor):
URL = apps.get_model('podcasts', 'URL')
Slug = apps.get_model('podcasts', 'Slug')
URL.objects.filter(scope__isnull=True).update(scope='')
Slug.objects.filter(scope__isnull=True).update(scope='')
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20140615_1032'),
]
operations = [
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.RunPython(set_scope),
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
]
|
# ... existing code ...
from django.db import models, migrations
def set_scope(apps, schema_editor):
URL = apps.get_model('podcasts', 'URL')
Slug = apps.get_model('podcasts', 'Slug')
URL.objects.filter(scope__isnull=True).update(scope='')
Slug.objects.filter(scope__isnull=True).update(scope='')
# ... modified code ...
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
...
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=True),
),
migrations.RunPython(set_scope),
migrations.AlterField(
model_name='slug',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
migrations.AlterField(
model_name='url',
name='scope',
field=models.CharField(db_index=True, max_length=32, blank=True, null=False),
),
]
# ... rest of the code ...
|
23edca2a2a87ca0d96becd92a0bf930cc6c33b6f
|
alltheitems/world.py
|
alltheitems/world.py
|
import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
|
import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
|
Fix API method names called by World.block_at
|
Fix API method names called by World.block_at
|
Python
|
mit
|
wurstmineberg/alltheitems.wurstmineberg.de,wurstmineberg/alltheitems.wurstmineberg.de
|
import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
- Dimension.overworld: api.v2.chunk_info_overworld,
+ Dimension.overworld: api.v2.api_chunk_info_overworld,
- Dimension.nether: api.v2.chunk_info_nether,
+ Dimension.nether: api.v2.api_chunk_info_nether,
- Dimension.end: api.v2.chunk_info_end
+ Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
|
Fix API method names called by World.block_at
|
## Code Before:
import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.chunk_info_overworld,
Dimension.nether: api.v2.chunk_info_nether,
Dimension.end: api.v2.chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
## Instruction:
Fix API method names called by World.block_at
## Code After:
import alltheitems.__main__ as ati
import api.v2
import enum
import minecraft
class Dimension(enum.Enum):
overworld = 0
nether = -1
end = 1
class World:
def __init__(self, world=None):
if world is None:
self.world = minecraft.World()
elif isinstance(world, minecraft.World):
self.world = world
elif isinstance(world, str):
self.world = minecraft.World(world)
else:
raise TypeError('Invalid world type: {}'.format(type(world)))
def block_at(self, x, y, z, dimension=Dimension.overworld):
chunk_x, block_x = divmod(x, 16)
chunk_y, block_y = divmod(y, 16)
chunk_z, block_z = divmod(z, 16)
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
return chunk[block_y][block_z][block_x]
|
// ... existing code ...
chunk = {
Dimension.overworld: api.v2.api_chunk_info_overworld,
Dimension.nether: api.v2.api_chunk_info_nether,
Dimension.end: api.v2.api_chunk_info_end
}[dimension](self.world, chunk_x, chunk_y, chunk_z)
// ... rest of the code ...
|
a75dc02612fd2159731d8fdc04e85a2fbc0138d0
|
bvspca/core/templatetags/utility_tags.py
|
bvspca/core/templatetags/utility_tags.py
|
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def to_css_name(value):
return value.lower().replace(' ', '-')
@register.filter
def get_property(instance, key):
return getattr(instance, key)
@register.assignment_tag
def get_google_maps_key():
return getattr(settings, 'GOOGLE_MAPS_KEY', "")
@register.assignment_tag
def get_google_analytics_id():
return getattr(settings, 'GOOGLE_ANALYTICS_ID', "")
|
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def to_css_name(value):
return value.lower().replace(' ', '-')
@register.filter
def get_property(instance, key):
return getattr(instance, key)
@register.simple_tag
def get_google_maps_key():
return getattr(settings, 'GOOGLE_MAPS_KEY', "")
@register.simple_tag
def get_google_analytics_id():
return getattr(settings, 'GOOGLE_ANALYTICS_ID', "")
|
Switch from deprecated assignment tags to simple tags
|
Switch from deprecated assignment tags to simple tags
|
Python
|
mit
|
nfletton/bvspca,nfletton/bvspca,nfletton/bvspca,nfletton/bvspca
|
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def to_css_name(value):
return value.lower().replace(' ', '-')
@register.filter
def get_property(instance, key):
return getattr(instance, key)
- @register.assignment_tag
+ @register.simple_tag
def get_google_maps_key():
return getattr(settings, 'GOOGLE_MAPS_KEY', "")
- @register.assignment_tag
+ @register.simple_tag
def get_google_analytics_id():
return getattr(settings, 'GOOGLE_ANALYTICS_ID', "")
|
Switch from deprecated assignment tags to simple tags
|
## Code Before:
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def to_css_name(value):
return value.lower().replace(' ', '-')
@register.filter
def get_property(instance, key):
return getattr(instance, key)
@register.assignment_tag
def get_google_maps_key():
return getattr(settings, 'GOOGLE_MAPS_KEY', "")
@register.assignment_tag
def get_google_analytics_id():
return getattr(settings, 'GOOGLE_ANALYTICS_ID', "")
## Instruction:
Switch from deprecated assignment tags to simple tags
## Code After:
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def to_css_name(value):
return value.lower().replace(' ', '-')
@register.filter
def get_property(instance, key):
return getattr(instance, key)
@register.simple_tag
def get_google_maps_key():
return getattr(settings, 'GOOGLE_MAPS_KEY', "")
@register.simple_tag
def get_google_analytics_id():
return getattr(settings, 'GOOGLE_ANALYTICS_ID', "")
|
# ... existing code ...
@register.simple_tag
def get_google_maps_key():
# ... modified code ...
@register.simple_tag
def get_google_analytics_id():
# ... rest of the code ...
|
a4f78af5b2973b044337dc430118fc270e527220
|
allauth/socialaccount/providers/keycloak/provider.py
|
allauth/socialaccount/providers/keycloak/provider.py
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
Use preferred_username claim for username
|
fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims
|
Python
|
mit
|
pennersr/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
- username=data.get('username'),
+ username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
Use preferred_username claim for username
|
## Code Before:
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
## Instruction:
Use preferred_username claim for username
## Code After:
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
// ... existing code ...
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
// ... rest of the code ...
|
04416cd9652a9fdc3ab58664ab4b96cbaff3f698
|
simuvex/s_event.py
|
simuvex/s_event.py
|
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
|
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
|
Python
|
bsd-2-clause
|
axt/angr,schieb/angr,angr/angr,tyb0807/angr,f-prettyland/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/angr,f-prettyland/angr,angr/angr,axt/angr,tyb0807/angr,iamahuman/angr,iamahuman/angr,chubbymaggie/angr,angr/simuvex,schieb/angr,iamahuman/angr,axt/angr,angr/angr,f-prettyland/angr,schieb/angr
|
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
- self.sim_procedure = state.scratch.sim_procedure.__class__
+ self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
|
## Code Before:
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
## Instruction:
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
## Code After:
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
// ... existing code ...
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
// ... rest of the code ...
|
4a8170079e2b715d40e94f5d407d110a635f8a5d
|
InvenTree/common/apps.py
|
InvenTree/common/apps.py
|
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError, IntegrityError
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
""" Will be called when the Common app is first loaded """
self.add_instance_name()
self.add_default_settings()
def add_instance_name(self):
"""
Check if an InstanceName has been defined for this database.
If not, create a random one!
"""
# See note above
from .models import InvenTreeSetting
"""
Note: The "old" instance name was stored under the key 'InstanceName',
but has now been renamed to 'INVENTREE_INSTANCE'.
"""
try:
# Quick exit if a value already exists for 'inventree_instance'
if InvenTreeSetting.objects.filter(key='INVENTREE_INSTANCE').exists():
return
# Default instance name
instance_name = InvenTreeSetting.get_default_value('INVENTREE_INSTANCE')
# Use the old name if it exists
if InvenTreeSetting.objects.filter(key='InstanceName').exists():
instance = InvenTreeSetting.objects.get(key='InstanceName')
instance_name = instance.value
# Delete the legacy key
instance.delete()
# Create new value
InvenTreeSetting.objects.create(
key='INVENTREE_INSTANCE',
value=instance_name
)
except (OperationalError, ProgrammingError, IntegrityError):
# Migrations have not yet been applied - table does not exist
pass
def add_default_settings(self):
"""
Create all required settings, if they do not exist.
"""
from .models import InvenTreeSetting
for key in InvenTreeSetting.GLOBAL_SETTINGS.keys():
try:
settings = InvenTreeSetting.objects.filter(key__iexact=key)
if settings.count() == 0:
value = InvenTreeSetting.get_default_value(key)
print(f"Creating default setting for {key} -> '{value}'")
InvenTreeSetting.objects.create(
key=key,
value=value
)
return
elif settings.count() > 1:
# Prevent multiple shadow copies of the same setting!
for setting in settings[1:]:
setting.delete()
# Ensure that the key has the correct case
setting = settings[0]
if not setting.key == key:
setting.key = key
setting.save()
except (OperationalError, ProgrammingError, IntegrityError):
# Table might not yet exist
pass
|
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError, IntegrityError
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
pass
|
Remove code which automatically created settings objects on server launch
|
Remove code which automatically created settings objects on server launch
|
Python
|
mit
|
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree
|
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError, IntegrityError
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
+ pass
- """ Will be called when the Common app is first loaded """
- self.add_instance_name()
- self.add_default_settings()
-
- def add_instance_name(self):
- """
- Check if an InstanceName has been defined for this database.
- If not, create a random one!
- """
-
- # See note above
- from .models import InvenTreeSetting
-
- """
- Note: The "old" instance name was stored under the key 'InstanceName',
- but has now been renamed to 'INVENTREE_INSTANCE'.
- """
-
- try:
-
- # Quick exit if a value already exists for 'inventree_instance'
- if InvenTreeSetting.objects.filter(key='INVENTREE_INSTANCE').exists():
- return
-
- # Default instance name
- instance_name = InvenTreeSetting.get_default_value('INVENTREE_INSTANCE')
-
- # Use the old name if it exists
- if InvenTreeSetting.objects.filter(key='InstanceName').exists():
- instance = InvenTreeSetting.objects.get(key='InstanceName')
- instance_name = instance.value
-
- # Delete the legacy key
- instance.delete()
-
- # Create new value
- InvenTreeSetting.objects.create(
- key='INVENTREE_INSTANCE',
- value=instance_name
- )
-
- except (OperationalError, ProgrammingError, IntegrityError):
- # Migrations have not yet been applied - table does not exist
- pass
-
- def add_default_settings(self):
- """
- Create all required settings, if they do not exist.
- """
-
- from .models import InvenTreeSetting
-
- for key in InvenTreeSetting.GLOBAL_SETTINGS.keys():
- try:
- settings = InvenTreeSetting.objects.filter(key__iexact=key)
-
- if settings.count() == 0:
- value = InvenTreeSetting.get_default_value(key)
-
- print(f"Creating default setting for {key} -> '{value}'")
-
- InvenTreeSetting.objects.create(
- key=key,
- value=value
- )
-
- return
-
- elif settings.count() > 1:
- # Prevent multiple shadow copies of the same setting!
- for setting in settings[1:]:
- setting.delete()
-
- # Ensure that the key has the correct case
- setting = settings[0]
-
- if not setting.key == key:
- setting.key = key
- setting.save()
-
- except (OperationalError, ProgrammingError, IntegrityError):
- # Table might not yet exist
- pass
-
|
Remove code which automatically created settings objects on server launch
|
## Code Before:
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError, IntegrityError
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
""" Will be called when the Common app is first loaded """
self.add_instance_name()
self.add_default_settings()
def add_instance_name(self):
"""
Check if an InstanceName has been defined for this database.
If not, create a random one!
"""
# See note above
from .models import InvenTreeSetting
"""
Note: The "old" instance name was stored under the key 'InstanceName',
but has now been renamed to 'INVENTREE_INSTANCE'.
"""
try:
# Quick exit if a value already exists for 'inventree_instance'
if InvenTreeSetting.objects.filter(key='INVENTREE_INSTANCE').exists():
return
# Default instance name
instance_name = InvenTreeSetting.get_default_value('INVENTREE_INSTANCE')
# Use the old name if it exists
if InvenTreeSetting.objects.filter(key='InstanceName').exists():
instance = InvenTreeSetting.objects.get(key='InstanceName')
instance_name = instance.value
# Delete the legacy key
instance.delete()
# Create new value
InvenTreeSetting.objects.create(
key='INVENTREE_INSTANCE',
value=instance_name
)
except (OperationalError, ProgrammingError, IntegrityError):
# Migrations have not yet been applied - table does not exist
pass
def add_default_settings(self):
"""
Create all required settings, if they do not exist.
"""
from .models import InvenTreeSetting
for key in InvenTreeSetting.GLOBAL_SETTINGS.keys():
try:
settings = InvenTreeSetting.objects.filter(key__iexact=key)
if settings.count() == 0:
value = InvenTreeSetting.get_default_value(key)
print(f"Creating default setting for {key} -> '{value}'")
InvenTreeSetting.objects.create(
key=key,
value=value
)
return
elif settings.count() > 1:
# Prevent multiple shadow copies of the same setting!
for setting in settings[1:]:
setting.delete()
# Ensure that the key has the correct case
setting = settings[0]
if not setting.key == key:
setting.key = key
setting.save()
except (OperationalError, ProgrammingError, IntegrityError):
# Table might not yet exist
pass
## Instruction:
Remove code which automatically created settings objects on server launch
## Code After:
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError, IntegrityError
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
pass
|
# ... existing code ...
def ready(self):
pass
# ... rest of the code ...
|
c07234bb3142df96dc9e02a236975bc3de2415cc
|
nailgun/nailgun/test/test_plugin.py
|
nailgun/nailgun/test/test_plugin.py
|
from nailgun.test.base import BaseHandlers
class TestPluginStateMachine(BaseHandlers):
def test_attrs_creation(self):
pass
|
from nailgun.test.base import BaseHandlers
from nailgun.plugin.process import get_queue, PluginProcessor
from nailgun.api.models import Task
class TestPluginProcess(BaseHandlers):
def setUp(self):
super(TestPluginProcess, self).setUp()
self.plugin_processor = PluginProcessor()
self.plugin_processor.start()
def tearDown(self):
super(TestPluginProcess, self).tearDown()
self.plugin_processor.terminate()
def test_task_set_to_error_when_exception_raised(self):
queue = get_queue()
task = Task(name='install_plugin', cache={'plugin_id': -1})
self.env.db.add(task)
self.env.db.commit()
queue.put(task.uuid)
def check_task_status_is_error():
self.env.db.refresh(task)
return task.status == 'error'
self.env.wait_for_true(check_task_status_is_error, timeout=2)
self.assertEquals(task.progress, 100)
|
Implement plugin test on exception handling
|
Implement plugin test on exception handling
|
Python
|
apache-2.0
|
SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,zhaochao/fuel-main,zhaochao/fuel-main,huntxu/fuel-main,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-main,SmartInfrastructures/fuel-main-dev,huntxu/fuel-web,teselkin/fuel-main,ddepaoli3/fuel-main-dev,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SergK/fuel-main,dancn/fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SergK/fuel-main,zhaochao/fuel-web,dancn/fuel-main-dev,nebril/fuel-web,dancn/fuel-main-dev,AnselZhangGit/fuel-main,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,nebril/fuel-web,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,AnselZhangGit/fuel-main,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-main,SergK/fuel-main,prmtl/fuel-web,zhaochao/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,zhaochao/fuel-web,SmartInfrastructures/fuel-web-dev,koder-ua/nailgun-fcert,koder-ua/nailgun-fcert,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,zhaochao/fuel-web,teselkin/fuel-main,stackforge/fuel-main,prmtl/fuel-web,AnselZhangGit/fuel-main,zhaochao/fuel-main,eayunstack/fuel-web,eayunstack/fuel-main,koder-ua/nailgun-fcert,stackforge/fuel-web,nebril/fuel-web,stackforge/fuel-web,zhaochao/fuel-main,ddepaoli3/fuel-main-dev,Fiware/ops.Fuel-main-dev,prmtl/fuel-web,SmartInfrastructures/fuel-main-dev,Fiware/ops.Fuel-main-dev,zhaochao/fuel-main,koder-ua/nailgun-fcert,huntxu/fuel-web,dancn/fuel-main-dev,stackforge/fuel-main,eayunstack/fuel-web,prmtl/fuel-web,stackforge/fuel-web,huntxu/fuel-web,huntxu/fuel-main,Fiware/ops.Fuel-main-dev,teselkin/fuel-main,nebril/fuel-web,eayunstack/fuel-web,stackforge/fuel-main
|
from nailgun.test.base import BaseHandlers
+ from nailgun.plugin.process import get_queue, PluginProcessor
+ from nailgun.api.models import Task
+ class TestPluginProcess(BaseHandlers):
+ def setUp(self):
+ super(TestPluginProcess, self).setUp()
+ self.plugin_processor = PluginProcessor()
+ self.plugin_processor.start()
- class TestPluginStateMachine(BaseHandlers):
+ def tearDown(self):
+ super(TestPluginProcess, self).tearDown()
+ self.plugin_processor.terminate()
- def test_attrs_creation(self):
- pass
+ def test_task_set_to_error_when_exception_raised(self):
+ queue = get_queue()
+ task = Task(name='install_plugin', cache={'plugin_id': -1})
+ self.env.db.add(task)
+ self.env.db.commit()
+ queue.put(task.uuid)
+
+ def check_task_status_is_error():
+ self.env.db.refresh(task)
+ return task.status == 'error'
+
+ self.env.wait_for_true(check_task_status_is_error, timeout=2)
+ self.assertEquals(task.progress, 100)
+
|
Implement plugin test on exception handling
|
## Code Before:
from nailgun.test.base import BaseHandlers
class TestPluginStateMachine(BaseHandlers):
def test_attrs_creation(self):
pass
## Instruction:
Implement plugin test on exception handling
## Code After:
from nailgun.test.base import BaseHandlers
from nailgun.plugin.process import get_queue, PluginProcessor
from nailgun.api.models import Task
class TestPluginProcess(BaseHandlers):
def setUp(self):
super(TestPluginProcess, self).setUp()
self.plugin_processor = PluginProcessor()
self.plugin_processor.start()
def tearDown(self):
super(TestPluginProcess, self).tearDown()
self.plugin_processor.terminate()
def test_task_set_to_error_when_exception_raised(self):
queue = get_queue()
task = Task(name='install_plugin', cache={'plugin_id': -1})
self.env.db.add(task)
self.env.db.commit()
queue.put(task.uuid)
def check_task_status_is_error():
self.env.db.refresh(task)
return task.status == 'error'
self.env.wait_for_true(check_task_status_is_error, timeout=2)
self.assertEquals(task.progress, 100)
|
...
from nailgun.test.base import BaseHandlers
from nailgun.plugin.process import get_queue, PluginProcessor
from nailgun.api.models import Task
class TestPluginProcess(BaseHandlers):
def setUp(self):
super(TestPluginProcess, self).setUp()
self.plugin_processor = PluginProcessor()
self.plugin_processor.start()
def tearDown(self):
super(TestPluginProcess, self).tearDown()
self.plugin_processor.terminate()
def test_task_set_to_error_when_exception_raised(self):
queue = get_queue()
task = Task(name='install_plugin', cache={'plugin_id': -1})
self.env.db.add(task)
self.env.db.commit()
queue.put(task.uuid)
def check_task_status_is_error():
self.env.db.refresh(task)
return task.status == 'error'
self.env.wait_for_true(check_task_status_is_error, timeout=2)
self.assertEquals(task.progress, 100)
...
|
5b208baa581e16290aa8332df966ad1d61876107
|
deployment/ansible/filter_plugins/custom_filters.py
|
deployment/ansible/filter_plugins/custom_filters.py
|
class FilterModule(object):
''' Additional filters for use within Ansible. '''
def filters(self):
return {
'is_not_in': self.is_not_in,
'is_in': self.is_in,
'some_are_in': self.some_are_in
}
def is_not_in(self, *t):
"""Determnies if there are no elements in common between x and y
x | is_not_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return set(groups_to_test).isdisjoint(set(all_group_names))
def is_in(self, *t):
"""Determnies if all of the elements in x are a subset of y
x | is_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return set(groups_to_test).issubset(set(all_group_names))
def some_are_in(self, *t):
"""Determnies if any element in x intersects with y
x | some_are_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return len(set(groups_to_test) & set(all_group_names)) > 0
|
class FilterModule(object):
''' Additional filters for use within Ansible. '''
def filters(self):
return {
'is_not_in': self.is_not_in,
'is_in': self.is_in,
'some_are_in': self.some_are_in
}
def is_not_in(self, x, y):
"""Determines if there are no elements in common between x and y
x | is_not_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return set(x).isdisjoint(set(y))
def is_in(self, x, y):
"""Determines if all of the elements in x are a subset of y
x | is_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return set(x).issubset(set(y))
def some_are_in(self, x, y):
"""Determines if any element in x intersects with y
x | some_are_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return len(set(x) & set(y)) > 0
|
Add explicit method signature to custom filters
|
Add explicit method signature to custom filters
This changeset adds an explicit method signature to the Ansible custom filters.
|
Python
|
agpl-3.0
|
maurizi/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,RickMohr/nyc-trees,maurizi/nyc-trees
|
class FilterModule(object):
''' Additional filters for use within Ansible. '''
def filters(self):
return {
'is_not_in': self.is_not_in,
'is_in': self.is_in,
'some_are_in': self.some_are_in
}
- def is_not_in(self, *t):
+ def is_not_in(self, x, y):
- """Determnies if there are no elements in common between x and y
+ """Determines if there are no elements in common between x and y
x | is_not_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
- groups_to_test, all_group_names = t
+ return set(x).isdisjoint(set(y))
- return set(groups_to_test).isdisjoint(set(all_group_names))
-
- def is_in(self, *t):
+ def is_in(self, x, y):
- """Determnies if all of the elements in x are a subset of y
+ """Determines if all of the elements in x are a subset of y
x | is_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
- groups_to_test, all_group_names = t
+ return set(x).issubset(set(y))
- return set(groups_to_test).issubset(set(all_group_names))
-
- def some_are_in(self, *t):
+ def some_are_in(self, x, y):
- """Determnies if any element in x intersects with y
+ """Determines if any element in x intersects with y
x | some_are_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
- groups_to_test, all_group_names = t
+ return len(set(x) & set(y)) > 0
- return len(set(groups_to_test) & set(all_group_names)) > 0
-
|
Add explicit method signature to custom filters
|
## Code Before:
class FilterModule(object):
''' Additional filters for use within Ansible. '''
def filters(self):
return {
'is_not_in': self.is_not_in,
'is_in': self.is_in,
'some_are_in': self.some_are_in
}
def is_not_in(self, *t):
"""Determnies if there are no elements in common between x and y
x | is_not_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return set(groups_to_test).isdisjoint(set(all_group_names))
def is_in(self, *t):
"""Determnies if all of the elements in x are a subset of y
x | is_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return set(groups_to_test).issubset(set(all_group_names))
def some_are_in(self, *t):
"""Determnies if any element in x intersects with y
x | some_are_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
groups_to_test, all_group_names = t
return len(set(groups_to_test) & set(all_group_names)) > 0
## Instruction:
Add explicit method signature to custom filters
## Code After:
class FilterModule(object):
''' Additional filters for use within Ansible. '''
def filters(self):
return {
'is_not_in': self.is_not_in,
'is_in': self.is_in,
'some_are_in': self.some_are_in
}
def is_not_in(self, x, y):
"""Determines if there are no elements in common between x and y
x | is_not_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return set(x).isdisjoint(set(y))
def is_in(self, x, y):
"""Determines if all of the elements in x are a subset of y
x | is_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return set(x).issubset(set(y))
def some_are_in(self, x, y):
"""Determines if any element in x intersects with y
x | some_are_in(y)
Arguments
:param t: A tuple with two elements (x and y)
"""
return len(set(x) & set(y)) > 0
|
...
def is_not_in(self, x, y):
"""Determines if there are no elements in common between x and y
...
"""
return set(x).isdisjoint(set(y))
def is_in(self, x, y):
"""Determines if all of the elements in x are a subset of y
...
"""
return set(x).issubset(set(y))
def some_are_in(self, x, y):
"""Determines if any element in x intersects with y
...
"""
return len(set(x) & set(y)) > 0
...
|
bfbdf34e2efd1d22ee6f15f4655334764106725c
|
locksmith/lightauth/common.py
|
locksmith/lightauth/common.py
|
from locksmith.common import apicall
try:
from django.conf import settings
SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY,
API_NAME = settings.LOCKSMITH_API_NAME
ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/'
except:
SIGNING_KEY = ""
API_NAME = ""
ENDPOINT = ""
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
try:
apicall(endpoint, signing_key,
api=api, endpoint=endpoint, key=key
)
except urllib2.HTTPError as e:
if e.code == 404:
return None
else:
raise
|
from locksmith.common import apicall
import urllib2
try:
from django.conf import settings
SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY
API_NAME = settings.LOCKSMITH_API_NAME
ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/'
except:
SIGNING_KEY = ""
API_NAME = ""
ENDPOINT = ""
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
try:
apicall(endpoint, signing_key,
api=api, key=key
)
return True
except urllib2.HTTPError as e:
if e.code == 404:
return None
else:
raise
|
Make client key checking actually work.
|
Make client key checking actually work.
|
Python
|
bsd-3-clause
|
sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith
|
from locksmith.common import apicall
+ import urllib2
try:
from django.conf import settings
- SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY,
+ SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY
API_NAME = settings.LOCKSMITH_API_NAME
ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/'
except:
SIGNING_KEY = ""
API_NAME = ""
ENDPOINT = ""
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
try:
apicall(endpoint, signing_key,
- api=api, endpoint=endpoint, key=key
+ api=api, key=key
)
+ return True
except urllib2.HTTPError as e:
if e.code == 404:
return None
else:
raise
|
Make client key checking actually work.
|
## Code Before:
from locksmith.common import apicall
try:
from django.conf import settings
SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY,
API_NAME = settings.LOCKSMITH_API_NAME
ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/'
except:
SIGNING_KEY = ""
API_NAME = ""
ENDPOINT = ""
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
try:
apicall(endpoint, signing_key,
api=api, endpoint=endpoint, key=key
)
except urllib2.HTTPError as e:
if e.code == 404:
return None
else:
raise
## Instruction:
Make client key checking actually work.
## Code After:
from locksmith.common import apicall
import urllib2
try:
from django.conf import settings
SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY
API_NAME = settings.LOCKSMITH_API_NAME
ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/'
except:
SIGNING_KEY = ""
API_NAME = ""
ENDPOINT = ""
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
try:
apicall(endpoint, signing_key,
api=api, key=key
)
return True
except urllib2.HTTPError as e:
if e.code == 404:
return None
else:
raise
|
...
from locksmith.common import apicall
import urllib2
...
from django.conf import settings
SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY
API_NAME = settings.LOCKSMITH_API_NAME
...
apicall(endpoint, signing_key,
api=api, key=key
)
return True
except urllib2.HTTPError as e:
...
|
f34d0d43311e51bcb04c5cbdf5bb31b7a8093feb
|
pyconde/tagging.py
|
pyconde/tagging.py
|
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through, model=model, instance=instance
)
return manager
|
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through,
model=model,
instance=instance,
prefetch_cache_name=self.name
)
return manager
|
Fix regression introduced by updating taggit (27971d6eed)
|
Fix regression introduced by updating taggit (27971d6eed)
django-taggit 0.11+ introduced support for prefetch_related which breaks
our taggit wrapping: alex/django-taggit@4f2e47f833
|
Python
|
bsd-3-clause
|
pysv/djep,pysv/djep,EuroPython/djep,pysv/djep,pysv/djep,pysv/djep,EuroPython/djep,EuroPython/djep,EuroPython/djep
|
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
- through=self.through, model=model, instance=instance
+ through=self.through,
+ model=model,
+ instance=instance,
+ prefetch_cache_name=self.name
)
return manager
|
Fix regression introduced by updating taggit (27971d6eed)
|
## Code Before:
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through, model=model, instance=instance
)
return manager
## Instruction:
Fix regression introduced by updating taggit (27971d6eed)
## Code After:
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through,
model=model,
instance=instance,
prefetch_cache_name=self.name
)
return manager
|
// ... existing code ...
manager = _TaggableManager(
through=self.through,
model=model,
instance=instance,
prefetch_cache_name=self.name
)
// ... rest of the code ...
|
a714511115bfee0fbdc6c70bd0abfceaa08384f6
|
idlk/__init__.py
|
idlk/__init__.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
from idlk import base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
"""
Compute the hash for the given byte string.
"""
result = 0
for char in data:
result = ((result << 8) + result) + _get_byte(char)
return result % 0xFFFEECED
def idlk(filename):
"""
Generate the lock file name for the given file.
"""
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base = os.path.splitext(macroman_name)[0]
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
Fix issues reported by pylint
|
Fix issues reported by pylint
|
Python
|
mit
|
znerol/py-idlk
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
- import idlk.base41
+ from idlk import base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
- h = 0
+ """
+ Compute the hash for the given byte string.
+ """
+ result = 0
- for c in data:
+ for char in data:
- h = ((h << 8) + h) + _get_byte(c)
+ result = ((result << 8) + result) + _get_byte(char)
- return h % 0xFFFEECED
+ return result % 0xFFFEECED
def idlk(filename):
+ """
+ Generate the lock file name for the given file.
+ """
+
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
- base, ext = os.path.splitext(macroman_name)
+ base = os.path.splitext(macroman_name)[0]
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
Fix issues reported by pylint
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
## Instruction:
Fix issues reported by pylint
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
from idlk import base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
"""
Compute the hash for the given byte string.
"""
result = 0
for char in data:
result = ((result << 8) + result) + _get_byte(char)
return result % 0xFFFEECED
def idlk(filename):
"""
Generate the lock file name for the given file.
"""
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base = os.path.splitext(macroman_name)[0]
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
// ... existing code ...
import unicodedata
from idlk import base41
// ... modified code ...
def hash_macroman(data):
"""
Compute the hash for the given byte string.
"""
result = 0
for char in data:
result = ((result << 8) + result) + _get_byte(char)
return result % 0xFFFEECED
...
def idlk(filename):
"""
Generate the lock file name for the given file.
"""
# Normalize to NFC.
...
hashed = base41.encode(hash_macroman(macroman_name))
base = os.path.splitext(macroman_name)[0]
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
// ... rest of the code ...
|
c600d1e1ad3cef69f6028afd64e14a04c747e1c6
|
tests/test_install.py
|
tests/test_install.py
|
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['2.6.1', '--type', 'pypy']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['2.6.1', '--type', 'pypy']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
|
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
|
Replace version of Python to install in test_{un,}install test
|
Replace version of Python to install in test_{un,}install test
PyPy 2.6.1's download link is not working anymore.
|
Python
|
mit
|
berdario/pew,berdario/pew
|
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
- py_version = ['2.6.1', '--type', 'pypy']
+ py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
- py_version = ['2.6.1', '--type', 'pypy']
+ py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
|
Replace version of Python to install in test_{un,}install test
|
## Code Before:
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['2.6.1', '--type', 'pypy']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['2.6.1', '--type', 'pypy']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
## Instruction:
Replace version of Python to install in test_{un,}install test
## Code After:
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
|
// ... existing code ...
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
// ... modified code ...
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
// ... rest of the code ...
|
679abfdd2b6a3c4d18170d93bfd42d73c47ff9c5
|
phasm/typing.py
|
phasm/typing.py
|
from typing import Mapping, Set, Callable, Union, Tuple, Iterable
# Pairwise local alignments
OrientedDNASegment = 'phasm.alignments.OrientedDNASegment'
OrientedRead = 'phasm.alignments.OrientedRead'
LocalAlignment = 'phasm.alignments.LocalAlignment'
AlignmentsT = Mapping[OrientedRead, Set[LocalAlignment]]
# Assembly Graphs
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
Node = OrientedDNASegment
Edge = Tuple[Node, Node]
Path = Iterable[Edge]
Bubble = Tuple[Node, Node]
# Phasing algorithm parameters
PruneParam = Union[float, Callable[[float], float]]
|
from typing import Mapping, Set, Callable, Union, Tuple, Iterable
# Pairwise local alignments
OrientedDNASegment = 'phasm.alignments.OrientedDNASegment'
OrientedRead = 'phasm.alignments.OrientedRead'
LocalAlignment = 'phasm.alignments.LocalAlignment'
AlignmentsT = Mapping[OrientedRead, Set[LocalAlignment]]
# Assembly Graphs
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
Node = Union[OrientedDNASegment, str]
Edge = Tuple[Node, Node]
Path = Iterable[Edge]
Bubble = Tuple[Node, Node]
# Phasing algorithm parameters
PruneParam = Union[float, Callable[[float], float]]
|
Change Node type a bit
|
Change Node type a bit
In a reconstructed assembly graph sometimes the nodes can be str
|
Python
|
mit
|
AbeelLab/phasm,AbeelLab/phasm
|
from typing import Mapping, Set, Callable, Union, Tuple, Iterable
# Pairwise local alignments
OrientedDNASegment = 'phasm.alignments.OrientedDNASegment'
OrientedRead = 'phasm.alignments.OrientedRead'
LocalAlignment = 'phasm.alignments.LocalAlignment'
AlignmentsT = Mapping[OrientedRead, Set[LocalAlignment]]
# Assembly Graphs
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
- Node = OrientedDNASegment
+ Node = Union[OrientedDNASegment, str]
Edge = Tuple[Node, Node]
Path = Iterable[Edge]
Bubble = Tuple[Node, Node]
# Phasing algorithm parameters
PruneParam = Union[float, Callable[[float], float]]
|
Change Node type a bit
|
## Code Before:
from typing import Mapping, Set, Callable, Union, Tuple, Iterable
# Pairwise local alignments
OrientedDNASegment = 'phasm.alignments.OrientedDNASegment'
OrientedRead = 'phasm.alignments.OrientedRead'
LocalAlignment = 'phasm.alignments.LocalAlignment'
AlignmentsT = Mapping[OrientedRead, Set[LocalAlignment]]
# Assembly Graphs
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
Node = OrientedDNASegment
Edge = Tuple[Node, Node]
Path = Iterable[Edge]
Bubble = Tuple[Node, Node]
# Phasing algorithm parameters
PruneParam = Union[float, Callable[[float], float]]
## Instruction:
Change Node type a bit
## Code After:
from typing import Mapping, Set, Callable, Union, Tuple, Iterable
# Pairwise local alignments
OrientedDNASegment = 'phasm.alignments.OrientedDNASegment'
OrientedRead = 'phasm.alignments.OrientedRead'
LocalAlignment = 'phasm.alignments.LocalAlignment'
AlignmentsT = Mapping[OrientedRead, Set[LocalAlignment]]
# Assembly Graphs
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
Node = Union[OrientedDNASegment, str]
Edge = Tuple[Node, Node]
Path = Iterable[Edge]
Bubble = Tuple[Node, Node]
# Phasing algorithm parameters
PruneParam = Union[float, Callable[[float], float]]
|
// ... existing code ...
AssemblyGraph = 'phasm.assembly_graph.AssemblyGraph'
Node = Union[OrientedDNASegment, str]
Edge = Tuple[Node, Node]
// ... rest of the code ...
|
9d1d99f8178252e91ae2ea62a20f6f4a104946fd
|
entities/base.py
|
entities/base.py
|
from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos
|
from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos
|
Add active flag to entities
|
Add active flag to entities
|
Python
|
mit
|
nephilahacks/spider-eats-the-kiwi
|
from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
+ self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
- def setSize(self, width, height):
+ def setSize(self, width, height):
- self.size = (width, height)
+ self.size = (width, height)
- def setPos(xpos, ypos):
+ def setPos(xpos, ypos):
- self.x = xpos
+ self.x = xpos
- self.y = ypos
+ self.y = ypos
|
Add active flag to entities
|
## Code Before:
from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos
## Instruction:
Add active flag to entities
## Code After:
from kivy.uix.widget import Widget
from kivy.core.window import Window
from kivy.graphics import Ellipse
from engine.entity import Entity
class BaseEntity(Widget, Entity):
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
Entity.__init__(self)
with self.canvas:
self.size = (Window.width*.002*25, Window.width*.002*25)
self.rect_bg = Ellipse(source=imageStr, pos=self.pos, size=self.size)
self.bind(pos=self.update_graphics_pos)
self.x = self.center_x
self.y = self.center_y
self.pos = (self.x, self.y)
self.rect_bg.pos = self.pos
def update(self):
self.move()
def update_graphics_pos(self, instance, value):
self.rect_bg.pos = value
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos
|
// ... existing code ...
def __init__(self, imageStr, **kwargs):
self.active = False
Widget.__init__(self, **kwargs)
// ... modified code ...
def setSize(self, width, height):
self.size = (width, height)
def setPos(xpos, ypos):
self.x = xpos
self.y = ypos
// ... rest of the code ...
|
267c17ce984952d16623b0305975626397529ca8
|
tests/config_test.py
|
tests/config_test.py
|
import pytest
from timewreport.config import TimeWarriorConfig
def test_get_value_should_return_value_if_key_available():
config = TimeWarriorConfig({'FOO': 'foo'})
assert config.get_value('FOO', 'bar') == 'foo'
def test_get_value_should_return_default_if_key_not_available():
config = TimeWarriorConfig({'BAR': 'foo'})
assert config.get_value('FOO', 'bar') == 'bar'
@pytest.fixture(scope='function', params=['on', 1, 'yes', 'y', 'true'])
def trueish_value(request):
return request.param
def test_get_boolean_should_return_true_on_trueish_values(trueish_value):
config = TimeWarriorConfig({'KEY': trueish_value})
assert config.get_boolean('KEY', False) is True
def test_get_boolean_should_return_false_on_falseish_values():
config = TimeWarriorConfig({'KEY': 'foo'})
assert config.get_boolean('KEY', True) is False
|
import pytest
from timewreport.config import TimeWarriorConfig
def test_get_value_should_return_value_if_key_available():
config = TimeWarriorConfig({'FOO': 'foo'})
assert config.get_value('FOO', 'bar') == 'foo'
def test_get_value_should_return_default_if_key_not_available():
config = TimeWarriorConfig({'BAR': 'foo'})
assert config.get_value('FOO', 'bar') == 'bar'
@pytest.fixture(scope='function', params=['on', 1, 'yes', 'y', 'true'])
def trueish_value(request):
return request.param
def test_get_boolean_should_return_true_on_trueish_values(trueish_value):
config = TimeWarriorConfig({'KEY': trueish_value})
assert config.get_boolean('KEY', False) is True
@pytest.fixture(scope='function', params=['off', 0, 'no', 'n', 'false'])
def falseish_value(request):
return request.param
def test_get_boolean_should_return_false_on_falseish_values(falseish_value):
config = TimeWarriorConfig({'KEY': falseish_value})
assert config.get_boolean('KEY', True) is False
|
Add tests for falseish config values
|
Add tests for falseish config values
|
Python
|
mit
|
lauft/timew-report
|
import pytest
from timewreport.config import TimeWarriorConfig
def test_get_value_should_return_value_if_key_available():
config = TimeWarriorConfig({'FOO': 'foo'})
assert config.get_value('FOO', 'bar') == 'foo'
def test_get_value_should_return_default_if_key_not_available():
config = TimeWarriorConfig({'BAR': 'foo'})
assert config.get_value('FOO', 'bar') == 'bar'
@pytest.fixture(scope='function', params=['on', 1, 'yes', 'y', 'true'])
def trueish_value(request):
return request.param
def test_get_boolean_should_return_true_on_trueish_values(trueish_value):
config = TimeWarriorConfig({'KEY': trueish_value})
assert config.get_boolean('KEY', False) is True
+ @pytest.fixture(scope='function', params=['off', 0, 'no', 'n', 'false'])
+ def falseish_value(request):
+ return request.param
+
+
- def test_get_boolean_should_return_false_on_falseish_values():
+ def test_get_boolean_should_return_false_on_falseish_values(falseish_value):
- config = TimeWarriorConfig({'KEY': 'foo'})
+ config = TimeWarriorConfig({'KEY': falseish_value})
assert config.get_boolean('KEY', True) is False
|
Add tests for falseish config values
|
## Code Before:
import pytest
from timewreport.config import TimeWarriorConfig
def test_get_value_should_return_value_if_key_available():
config = TimeWarriorConfig({'FOO': 'foo'})
assert config.get_value('FOO', 'bar') == 'foo'
def test_get_value_should_return_default_if_key_not_available():
config = TimeWarriorConfig({'BAR': 'foo'})
assert config.get_value('FOO', 'bar') == 'bar'
@pytest.fixture(scope='function', params=['on', 1, 'yes', 'y', 'true'])
def trueish_value(request):
return request.param
def test_get_boolean_should_return_true_on_trueish_values(trueish_value):
config = TimeWarriorConfig({'KEY': trueish_value})
assert config.get_boolean('KEY', False) is True
def test_get_boolean_should_return_false_on_falseish_values():
config = TimeWarriorConfig({'KEY': 'foo'})
assert config.get_boolean('KEY', True) is False
## Instruction:
Add tests for falseish config values
## Code After:
import pytest
from timewreport.config import TimeWarriorConfig
def test_get_value_should_return_value_if_key_available():
config = TimeWarriorConfig({'FOO': 'foo'})
assert config.get_value('FOO', 'bar') == 'foo'
def test_get_value_should_return_default_if_key_not_available():
config = TimeWarriorConfig({'BAR': 'foo'})
assert config.get_value('FOO', 'bar') == 'bar'
@pytest.fixture(scope='function', params=['on', 1, 'yes', 'y', 'true'])
def trueish_value(request):
return request.param
def test_get_boolean_should_return_true_on_trueish_values(trueish_value):
config = TimeWarriorConfig({'KEY': trueish_value})
assert config.get_boolean('KEY', False) is True
@pytest.fixture(scope='function', params=['off', 0, 'no', 'n', 'false'])
def falseish_value(request):
return request.param
def test_get_boolean_should_return_false_on_falseish_values(falseish_value):
config = TimeWarriorConfig({'KEY': falseish_value})
assert config.get_boolean('KEY', True) is False
|
...
@pytest.fixture(scope='function', params=['off', 0, 'no', 'n', 'false'])
def falseish_value(request):
return request.param
def test_get_boolean_should_return_false_on_falseish_values(falseish_value):
config = TimeWarriorConfig({'KEY': falseish_value})
...
|
bc399ed6902f6ba3d24e1ce1a8ff88a259793c3a
|
Artifactorial/urls.py
|
Artifactorial/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns('Artifactorial.views',
url(r'^artifacts/(?P<filename>.*)$', 'artifacts', name='artifacts'),
url(r'^shared/(?P<token>.*)$', 'shared', name='shared'))
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
import Artifactorial.views as a_views
urlpatterns = [
url(r'^artifacts/(?P<filename>.*)$', a_views.artifacts, name='artifacts'),
url(r'^shared/(?P<token>.*)$', a_views.shared, name='shared')
]
|
Use the new url pattern
|
Use the new url pattern
|
Python
|
mit
|
ivoire/Artifactorial,ivoire/Artifactorial,ivoire/Artifactorial
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
+ import Artifactorial.views as a_views
- urlpatterns = patterns('Artifactorial.views',
- url(r'^artifacts/(?P<filename>.*)$', 'artifacts', name='artifacts'),
- url(r'^shared/(?P<token>.*)$', 'shared', name='shared'))
+ urlpatterns = [
+ url(r'^artifacts/(?P<filename>.*)$', a_views.artifacts, name='artifacts'),
+ url(r'^shared/(?P<token>.*)$', a_views.shared, name='shared')
+ ]
+
|
Use the new url pattern
|
## Code Before:
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns('Artifactorial.views',
url(r'^artifacts/(?P<filename>.*)$', 'artifacts', name='artifacts'),
url(r'^shared/(?P<token>.*)$', 'shared', name='shared'))
## Instruction:
Use the new url pattern
## Code After:
from __future__ import unicode_literals
from django.conf.urls import patterns, url
import Artifactorial.views as a_views
urlpatterns = [
url(r'^artifacts/(?P<filename>.*)$', a_views.artifacts, name='artifacts'),
url(r'^shared/(?P<token>.*)$', a_views.shared, name='shared')
]
|
// ... existing code ...
import Artifactorial.views as a_views
urlpatterns = [
url(r'^artifacts/(?P<filename>.*)$', a_views.artifacts, name='artifacts'),
url(r'^shared/(?P<token>.*)$', a_views.shared, name='shared')
]
// ... rest of the code ...
|
6064db3000f2aeec66a775345d22b8a2b421497f
|
astropy/utils/tests/test_gzip.py
|
astropy/utils/tests/test_gzip.py
|
import io
import os
from ...tests.helper import pytest
from .. import gzip
def test_gzip(tmpdir):
fd = gzip.GzipFile(str(tmpdir.join("test.gz")), 'wb')
fd = io.TextIOWrapper(fd, encoding='utf8')
|
import io
import os
from ...tests.helper import pytest
from .. import gzip
pytestmark = pytest.mark.skipif("sys.version_info < (3,0)")
def test_gzip(tmpdir):
fd = gzip.GzipFile(str(tmpdir.join("test.gz")), 'wb')
fd = io.TextIOWrapper(fd, encoding='utf8')
|
Fix gzip test for Python 2.6
|
Fix gzip test for Python 2.6
|
Python
|
bsd-3-clause
|
tbabej/astropy,bsipocz/astropy,lpsinger/astropy,MSeifert04/astropy,StuartLittlefair/astropy,larrybradley/astropy,DougBurke/astropy,stargaser/astropy,pllim/astropy,stargaser/astropy,MSeifert04/astropy,tbabej/astropy,lpsinger/astropy,joergdietrich/astropy,astropy/astropy,joergdietrich/astropy,dhomeier/astropy,kelle/astropy,kelle/astropy,saimn/astropy,MSeifert04/astropy,bsipocz/astropy,funbaker/astropy,kelle/astropy,pllim/astropy,dhomeier/astropy,StuartLittlefair/astropy,pllim/astropy,dhomeier/astropy,MSeifert04/astropy,larrybradley/astropy,mhvk/astropy,funbaker/astropy,StuartLittlefair/astropy,stargaser/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,astropy/astropy,AustereCuriosity/astropy,tbabej/astropy,DougBurke/astropy,AustereCuriosity/astropy,funbaker/astropy,pllim/astropy,lpsinger/astropy,larrybradley/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,joergdietrich/astropy,saimn/astropy,kelle/astropy,lpsinger/astropy,stargaser/astropy,mhvk/astropy,bsipocz/astropy,StuartLittlefair/astropy,dhomeier/astropy,saimn/astropy,mhvk/astropy,joergdietrich/astropy,astropy/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,astropy/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,mhvk/astropy,aleksandr-bakanov/astropy,saimn/astropy,larrybradley/astropy,larrybradley/astropy,tbabej/astropy,pllim/astropy,bsipocz/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,astropy/astropy,kelle/astropy,lpsinger/astropy,AustereCuriosity/astropy,tbabej/astropy,saimn/astropy,DougBurke/astropy
|
import io
import os
from ...tests.helper import pytest
from .. import gzip
+ pytestmark = pytest.mark.skipif("sys.version_info < (3,0)")
+
def test_gzip(tmpdir):
fd = gzip.GzipFile(str(tmpdir.join("test.gz")), 'wb')
fd = io.TextIOWrapper(fd, encoding='utf8')
|
Fix gzip test for Python 2.6
|
## Code Before:
import io
import os
from ...tests.helper import pytest
from .. import gzip
def test_gzip(tmpdir):
fd = gzip.GzipFile(str(tmpdir.join("test.gz")), 'wb')
fd = io.TextIOWrapper(fd, encoding='utf8')
## Instruction:
Fix gzip test for Python 2.6
## Code After:
import io
import os
from ...tests.helper import pytest
from .. import gzip
pytestmark = pytest.mark.skipif("sys.version_info < (3,0)")
def test_gzip(tmpdir):
fd = gzip.GzipFile(str(tmpdir.join("test.gz")), 'wb')
fd = io.TextIOWrapper(fd, encoding='utf8')
|
# ... existing code ...
pytestmark = pytest.mark.skipif("sys.version_info < (3,0)")
def test_gzip(tmpdir):
# ... rest of the code ...
|
f232481f966580bfd54ddd0eeb781badda3a9394
|
swh/web/add_forge_now/apps.py
|
swh/web/add_forge_now/apps.py
|
from django.apps import AppConfig
class AddForgeNowConfig(AppConfig):
name = "add_forge_now"
|
from django.apps import AppConfig
class AddForgeNowConfig(AppConfig):
name = "swh.web.add_forge_now"
label = "swh_web_add_forge_now"
|
Rename django app and add missing app_label
|
add_forge_now: Rename django app and add missing app_label
This fixes errors when using django 3.2 and thus the packaging
on debian unstable
|
Python
|
agpl-3.0
|
SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui
|
from django.apps import AppConfig
class AddForgeNowConfig(AppConfig):
- name = "add_forge_now"
+ name = "swh.web.add_forge_now"
+ label = "swh_web_add_forge_now"
|
Rename django app and add missing app_label
|
## Code Before:
from django.apps import AppConfig
class AddForgeNowConfig(AppConfig):
name = "add_forge_now"
## Instruction:
Rename django app and add missing app_label
## Code After:
from django.apps import AppConfig
class AddForgeNowConfig(AppConfig):
name = "swh.web.add_forge_now"
label = "swh_web_add_forge_now"
|
...
class AddForgeNowConfig(AppConfig):
name = "swh.web.add_forge_now"
label = "swh_web_add_forge_now"
...
|
421dbe962dae44cad7aa734a397cb16fe9b1632f
|
reactive/datanode.py
|
reactive/datanode.py
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
Update charms.hadoop reference to follow convention
|
Update charms.hadoop reference to follow convention
|
Python
|
apache-2.0
|
johnsca/layer-apache-hadoop-datanode,juju-solutions/layer-apache-hadoop-datanode
|
from charms.reactive import when, when_not, set_state, remove_state
- from charms.hadoop import get_hadoop_base
+ from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
Update charms.hadoop reference to follow convention
|
## Code Before:
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
## Instruction:
Update charms.hadoop reference to follow convention
## Code After:
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
...
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
...
|
627729380b8fbd6d1b4e4eec0362418dbf698d55
|
libs/qpanel/upgrader.py
|
libs/qpanel/upgrader.py
|
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = '[email protected]:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = '[email protected]:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
Change url to get stable version number
|
Change url to get stable version number
|
Python
|
mit
|
roramirez/qpanel,roramirez/qpanel,skazancev/qpanel,skazancev/qpanel,skazancev/qpanel,roramirez/qpanel,roramirez/qpanel,skazancev/qpanel
|
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = '[email protected]:roramirez/qpanel.git'
+ URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
- URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
- '/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
Change url to get stable version number
|
## Code Before:
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = '[email protected]:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
## Instruction:
Change url to get stable version number
## Code After:
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = '[email protected]:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
# ... existing code ...
REPO = '[email protected]:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
# ... rest of the code ...
|
3372bade0c5aee8c30c507832c842d6533608f61
|
porunga/tests/test_main.py
|
porunga/tests/test_main.py
|
import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertIn('test', commands)
test_command = commands['test']
self.assertIsInstance(test_command, PorungaTestCommand)
|
import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertTrue('test' in commands)
test_command = commands['test']
self.assertTrue(isinstance(test_command, PorungaTestCommand))
|
Test updated to work with Python 2.6
|
Test updated to work with Python 2.6
|
Python
|
bsd-2-clause
|
lukaszb/porunga,lukaszb/porunga
|
import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
- self.assertIn('test', commands)
+ self.assertTrue('test' in commands)
test_command = commands['test']
- self.assertIsInstance(test_command, PorungaTestCommand)
+ self.assertTrue(isinstance(test_command, PorungaTestCommand))
|
Test updated to work with Python 2.6
|
## Code Before:
import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertIn('test', commands)
test_command = commands['test']
self.assertIsInstance(test_command, PorungaTestCommand)
## Instruction:
Test updated to work with Python 2.6
## Code After:
import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertTrue('test' in commands)
test_command = commands['test']
self.assertTrue(isinstance(test_command, PorungaTestCommand))
|
# ... existing code ...
self.assertTrue('test' in commands)
test_command = commands['test']
self.assertTrue(isinstance(test_command, PorungaTestCommand))
# ... rest of the code ...
|
4e74723aac53956fb0316ae0d438da623de133d5
|
tests/extensions/video/test_renderer.py
|
tests/extensions/video/test_renderer.py
|
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234', 'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234',
'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
assert '<style>body{margin:0;padding:0;}</style>' in ''.join(body.split())
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
Add and update tests for video renderer
|
Add and update tests for video renderer
|
Python
|
apache-2.0
|
felliott/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,CenterForOpenScience/modular-file-renderer,CenterForOpenScience/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,felliott/modular-file-renderer
|
import pytest
from mfr.core.provider import ProviderMetadata
-
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
- return ProviderMetadata('test', '.mp4', 'text/plain', '1234', 'http://wb.osf.io/file/test.mp4?token=1234')
+ return ProviderMetadata('test', '.mp4', 'text/plain', '1234',
+ 'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
+ assert '<style>body{margin:0;padding:0;}</style>' in ''.join(body.split())
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
Add and update tests for video renderer
|
## Code Before:
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234', 'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
## Instruction:
Add and update tests for video renderer
## Code After:
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234',
'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
assert '<style>body{margin:0;padding:0;}</style>' in ''.join(body.split())
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
// ... existing code ...
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
// ... modified code ...
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234',
'http://wb.osf.io/file/test.mp4?token=1234')
...
assert 'src="{}"'.format(metadata().download_url) in body
assert '<style>body{margin:0;padding:0;}</style>' in ''.join(body.split())
// ... rest of the code ...
|
1431f45e6b605e54f1ec341114b53ae047e48be7
|
token_names.py
|
token_names.py
|
INTEGER, PLUS, MINUS, MULTIPLY, DIVIDE, LPAREN, RPAREN, EOF, OPEN, CLOSE, BANG, ASSIGN, SEMI, ID = (
'INTEGER',
'PLUS',
'MINUS',
'MULTIPLY',
'DIVIDE',
'LPAREN',
'RPAREN',
'EOF',
'OPEN',
'CLOSE',
'BANG',
'ASSIGN',
'SEMI',
'ID'
)
|
ASSIGN = 'ASSIGN'
BANG = 'BANG'
CLOSE = 'CLOSE'
DIVIDE = 'DIVIDE'
EOF = 'EOF'
ID = 'ID'
INTEGER = 'INTEGER'
LPAREN = 'LPAREN'
MINUS = 'MINUS'
MULTIPLY = 'MULTIPLY'
OPEN = 'OPEN'
PLUS = 'PLUS'
RPAREN = 'RPAREN'
SEMI = 'SEMI'
|
Fix token names format for readability.
|
Fix token names format for readability.
|
Python
|
mit
|
doubledherin/my_compiler
|
+ ASSIGN = 'ASSIGN'
+ BANG = 'BANG'
+ CLOSE = 'CLOSE'
+ DIVIDE = 'DIVIDE'
+ EOF = 'EOF'
+ ID = 'ID'
+ INTEGER = 'INTEGER'
+ LPAREN = 'LPAREN'
+ MINUS = 'MINUS'
+ MULTIPLY = 'MULTIPLY'
+ OPEN = 'OPEN'
+ PLUS = 'PLUS'
+ RPAREN = 'RPAREN'
+ SEMI = 'SEMI'
- INTEGER, PLUS, MINUS, MULTIPLY, DIVIDE, LPAREN, RPAREN, EOF, OPEN, CLOSE, BANG, ASSIGN, SEMI, ID = (
- 'INTEGER',
- 'PLUS',
- 'MINUS',
- 'MULTIPLY',
- 'DIVIDE',
- 'LPAREN',
- 'RPAREN',
- 'EOF',
- 'OPEN',
- 'CLOSE',
- 'BANG',
- 'ASSIGN',
- 'SEMI',
- 'ID'
- )
|
Fix token names format for readability.
|
## Code Before:
INTEGER, PLUS, MINUS, MULTIPLY, DIVIDE, LPAREN, RPAREN, EOF, OPEN, CLOSE, BANG, ASSIGN, SEMI, ID = (
'INTEGER',
'PLUS',
'MINUS',
'MULTIPLY',
'DIVIDE',
'LPAREN',
'RPAREN',
'EOF',
'OPEN',
'CLOSE',
'BANG',
'ASSIGN',
'SEMI',
'ID'
)
## Instruction:
Fix token names format for readability.
## Code After:
ASSIGN = 'ASSIGN'
BANG = 'BANG'
CLOSE = 'CLOSE'
DIVIDE = 'DIVIDE'
EOF = 'EOF'
ID = 'ID'
INTEGER = 'INTEGER'
LPAREN = 'LPAREN'
MINUS = 'MINUS'
MULTIPLY = 'MULTIPLY'
OPEN = 'OPEN'
PLUS = 'PLUS'
RPAREN = 'RPAREN'
SEMI = 'SEMI'
|
// ... existing code ...
ASSIGN = 'ASSIGN'
BANG = 'BANG'
CLOSE = 'CLOSE'
DIVIDE = 'DIVIDE'
EOF = 'EOF'
ID = 'ID'
INTEGER = 'INTEGER'
LPAREN = 'LPAREN'
MINUS = 'MINUS'
MULTIPLY = 'MULTIPLY'
OPEN = 'OPEN'
PLUS = 'PLUS'
RPAREN = 'RPAREN'
SEMI = 'SEMI'
// ... rest of the code ...
|
2ef4362be90e2314b69a2ff17ccb5d25ef8905fd
|
rackspace/database/database_service.py
|
rackspace/database/database_service.py
|
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
|
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
if not version:
version = "v1"
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
|
Set default version for cloud databases.
|
Set default version for cloud databases.
|
Python
|
apache-2.0
|
rackerlabs/rackspace-sdk-plugin,briancurtin/rackspace-sdk-plugin
|
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
+ if not version:
+ version = "v1"
+
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
|
Set default version for cloud databases.
|
## Code Before:
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
## Instruction:
Set default version for cloud databases.
## Code After:
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
if not version:
version = "v1"
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
|
# ... existing code ...
if not version:
version = "v1"
super(DatabaseService, self).__init__(service_type="rax:database",
# ... rest of the code ...
|
d8ae3ab5f6baf0ee965548f8df37e1a4b331a8aa
|
install_all_addons.py
|
install_all_addons.py
|
import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
Update install script with full file paths
|
Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.
|
Python
|
mit
|
TactileUniverse/3D-Printed-Galaxy-Software
|
import bpy
+ import os
+
+ # get current directory
+ current_dir = os.getcwd()
# install and activate `emboss plane`
+ emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
- bpy.ops.wm.addon_install(filepath='emboss_plane.py')
+ bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
+ name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
- bpy.ops.wm.addon_install(filepath='name_plate.py')
+ bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
Update install script with full file paths
|
## Code Before:
import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
## Instruction:
Update install script with full file paths
## Code After:
import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
# ... existing code ...
import bpy
import os
# get current directory
current_dir = os.getcwd()
# ... modified code ...
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
...
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# ... rest of the code ...
|
fe98a627943c235ba24fc6de781deec69e7fd02e
|
relayer/__init__.py
|
relayer/__init__.py
|
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
producer = KafkaProducer(bootstrap_servers=kafka_hosts)
emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self.emitter.flush()
|
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self._emitter.flush()
|
Save event emitter and producer reference in relayer instance
|
Save event emitter and producer reference in relayer instance
|
Python
|
mit
|
wizeline/relayer
|
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
- producer = KafkaProducer(bootstrap_servers=kafka_hosts)
+ self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
- emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
+ self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
- self.context = context_handler_class(emitter)
+ self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
- self.emitter.flush()
+ self._emitter.flush()
|
Save event emitter and producer reference in relayer instance
|
## Code Before:
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
producer = KafkaProducer(bootstrap_servers=kafka_hosts)
emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self.emitter.flush()
## Instruction:
Save event emitter and producer reference in relayer instance
## Code After:
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self._emitter.flush()
|
// ... existing code ...
self.source = source
self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(self._emitter)
// ... modified code ...
def flush(self):
self._emitter.flush()
// ... rest of the code ...
|
f2a88e4849876970c29b568b897dff88ffe09306
|
djrichtextfield/urls.py
|
djrichtextfield/urls.py
|
from django.conf.urls import url
from djrichtextfield.views import InitView
urlpatterns = [
url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
]
|
from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
|
Use path instead of soon to be deprecated url
|
Use path instead of soon to be deprecated url
|
Python
|
mit
|
jaap3/django-richtextfield,jaap3/django-richtextfield
|
- from django.conf.urls import url
+ from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
- url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
+ path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
|
Use path instead of soon to be deprecated url
|
## Code Before:
from django.conf.urls import url
from djrichtextfield.views import InitView
urlpatterns = [
url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
]
## Instruction:
Use path instead of soon to be deprecated url
## Code After:
from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
|
// ... existing code ...
from django.urls import path
// ... modified code ...
urlpatterns = [
path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
// ... rest of the code ...
|
62f6e116306901aedaa738236075c4faa00db74d
|
tests/config_test.py
|
tests/config_test.py
|
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
|
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
|
Fix module path (config -> config_yaml) to unbreak test.
|
Fix module path (config -> config_yaml) to unbreak test.
|
Python
|
apache-2.0
|
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
|
import glob
import json
import os
import unittest
# Local imports
- import config
+ import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
- expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
+ expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
|
Fix module path (config -> config_yaml) to unbreak test.
|
## Code Before:
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix module path (config -> config_yaml) to unbreak test.
## Code After:
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
# Local imports
import config_yaml
// ... modified code ...
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
// ... rest of the code ...
|
eb1fdf3419bdfd1d5920d73a877f707162b783b0
|
cfgrib/__init__.py
|
cfgrib/__init__.py
|
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
Drop unused and dangerous entrypoint `open_fileindex`
|
Drop unused and dangerous entrypoint `open_fileindex`
|
Python
|
apache-2.0
|
ecmwf/cfgrib
|
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
+ from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
- from .dataset import (
- Dataset,
- DatasetBuildError,
- open_container,
- open_file,
- open_fileindex,
- open_from_index,
- )
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
Drop unused and dangerous entrypoint `open_fileindex`
|
## Code Before:
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
## Instruction:
Drop unused and dangerous entrypoint `open_fileindex`
## Code After:
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
// ... existing code ...
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
// ... rest of the code ...
|
76e436daef154bdf6acd1b0569f6fa2baa61addd
|
pyxform/tests_v1/test_audit.py
|
pyxform/tests_v1/test_audit.py
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
Add test for blank audit name.
|
Add test for blank audit name.
|
Python
|
bsd-2-clause
|
XLSForm/pyxform,XLSForm/pyxform
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
+
+ def test_audit_blank_name(self):
+ self.assertPyxformXform(
+ name="meta_audit",
+ md="""
+ | survey | | | |
+ | | type | name | label |
+ | | audit | | |
+ """,
+ xml__contains=[
+ '<meta>',
+ '<audit/>',
+ '</meta>',
+ '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
+ )
|
Add test for blank audit name.
|
## Code Before:
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
## Instruction:
Add test for blank audit name.
## Code After:
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
...
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
...
|
1cf7b11cdb12a135f2dfa99d7e625eb160b0d7c2
|
apps/orders/models.py
|
apps/orders/models.py
|
from django.db import models
# Create your models here.
|
from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
|
Create Order and OrderItem Models
|
Create Order and OrderItem Models
|
Python
|
mit
|
samitnuk/online_shop,samitnuk/online_shop,samitnuk/online_shop
|
from django.db import models
- # Create your models here.
+ from ..shop.models import Product
+
+ class Order(models.Model):
+ first_name = models.CharField(verbose_name="Ім,я", max_length=50)
+ last_name = models.CharField(verbose_name="Прізвище", max_length=50)
+ email = models.EmailField(verbose_name="Email")
+ address = models.CharField(verbose_name="Адреса", max_length=250)
+ postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
+ city = models.CharField(verbose_name="Місто", max_length=100)
+ created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
+ updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
+ paid = models.BooleanField(verbose_name="Оплачене", default=False)
+
+ class Meta:
+ ordering = ('-created', )
+ verbose_name = "Замовлення"
+ verbose_name_plural = "Замовлення"
+
+ def __str__(self):
+ return "Замовлення: {}".format(self.id)
+
+ def get_total_cost(self):
+ return sum(item.get_cost() for item in self.items.all())
+
+
+ class OrderItem(models.Model):
+ order = models.ForeignKey(Order, related_name="items")
+ product = models.ForeignKey(Product, related_name="order_items")
+ price = models.DecimalField(verbose_name="Ціна", max_digits=10,
+ decimal_places=2)
+ quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
+
+ def __str__(self):
+ return '{}'.format(self.id)
+
+ def get_cost(self):
+ return self.price * self.quantity
+
|
Create Order and OrderItem Models
|
## Code Before:
from django.db import models
# Create your models here.
## Instruction:
Create Order and OrderItem Models
## Code After:
from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
|
# ... existing code ...
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
# ... rest of the code ...
|
5a88126b53bbd47a4c8899b50bdbf0d913183bd5
|
norm/test/test_porcelain.py
|
norm/test/test_porcelain.py
|
from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into foo (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from foo where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
|
from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
class SqliteTest(TestCase):
timeout = 2
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool('sqlite:')
yield pool.runOperation('''CREATE TABLE porc1 (
id integer primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
|
Fix postgres porcelain test and add sqlite one
|
Fix postgres porcelain test and add sqlite one
|
Python
|
mit
|
iffy/norm,iffy/norm
|
from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
- d = cursor.execute('insert into foo (name) values (?)', (name,))
+ d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
- rows = yield pool.runQuery('select id, name from foo where id = ?', (rowid,))
+ rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
+
+ class SqliteTest(TestCase):
+
+
+ timeout = 2
+
+
+ @defer.inlineCallbacks
+ def test_basic(self):
+ pool = yield makePool('sqlite:')
+ yield pool.runOperation('''CREATE TABLE porc1 (
+ id integer primary key,
+ created timestamp default current_timestamp,
+ name text
+ )''')
+
+ def interaction(cursor, name):
+ d = cursor.execute('insert into porc1 (name) values (?)', (name,))
+ d.addCallback(lambda _: cursor.lastRowId())
+ return d
+ rowid = yield pool.runInteraction(interaction, 'bob')
+ rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
+ self.assertEqual(rows, [(rowid, 'bob')])
|
Fix postgres porcelain test and add sqlite one
|
## Code Before:
from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into foo (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from foo where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
## Instruction:
Fix postgres porcelain test and add sqlite one
## Code After:
from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
class SqliteTest(TestCase):
timeout = 2
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool('sqlite:')
yield pool.runOperation('''CREATE TABLE porc1 (
id integer primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
|
// ... existing code ...
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
// ... modified code ...
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
...
class SqliteTest(TestCase):
timeout = 2
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool('sqlite:')
yield pool.runOperation('''CREATE TABLE porc1 (
id integer primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
// ... rest of the code ...
|
2c03171b75b6bb4f3a77d3b46ee8fd1e5b022077
|
template_engine/jinja2_filters.py
|
template_engine/jinja2_filters.py
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
Fix type error if input is int
|
Fix type error if input is int
|
Python
|
mit
|
bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance
|
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
+ if type(s) is int:
+ return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
Fix type error if input is int
|
## Code Before:
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
## Instruction:
Fix type error if input is int
## Code After:
from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
# ... existing code ...
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
# ... rest of the code ...
|
fc7c08aecf9d247e54db70ae14c999902d6f6bfa
|
workflow/migrations/0024_auto_20180620_0537.py
|
workflow/migrations/0024_auto_20180620_0537.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
Fix the dashboard migration for UUID
|
Fix the dashboard migration for UUID
|
Python
|
apache-2.0
|
toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
- field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
+ field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
Fix the dashboard migration for UUID
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
## Instruction:
Fix the dashboard migration for UUID
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
# ... existing code ...
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
# ... rest of the code ...
|
f16d93216e1f0890b0551ca3b741130bb12781ef
|
gold_digger/settings/__init__.py
|
gold_digger/settings/__init__.py
|
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
profile = environ.get("GOLD_DIGGER_PROFILE", "local")
if profile == "master":
from ._settings_master import *
elif profile == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
"Local configuration not found. Create file _settings_local.py in {} directory according to README.".format(
path.abspath(path.join(__file__, path.pardir))
)
)
else:
raise ValueError("Unsupported settings profile. Got: {}. Use one of: master, staging, local.".format(profile))
|
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
|
Make global variable upper-case and use f-strings
|
Make global variable upper-case and use f-strings
|
Python
|
apache-2.0
|
business-factory/gold-digger
|
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
- profile = environ.get("GOLD_DIGGER_PROFILE", "local")
+ PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
- if profile == "master":
+ if PROFILE == "master":
from ._settings_master import *
- elif profile == "local":
+ elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
- "Local configuration not found. Create file _settings_local.py in {} directory according to README.".format(
+ f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
- path.abspath(path.join(__file__, path.pardir))
- )
)
else:
- raise ValueError("Unsupported settings profile. Got: {}. Use one of: master, staging, local.".format(profile))
+ raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
|
Make global variable upper-case and use f-strings
|
## Code Before:
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
profile = environ.get("GOLD_DIGGER_PROFILE", "local")
if profile == "master":
from ._settings_master import *
elif profile == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
"Local configuration not found. Create file _settings_local.py in {} directory according to README.".format(
path.abspath(path.join(__file__, path.pardir))
)
)
else:
raise ValueError("Unsupported settings profile. Got: {}. Use one of: master, staging, local.".format(profile))
## Instruction:
Make global variable upper-case and use f-strings
## Code After:
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
|
// ... existing code ...
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
// ... modified code ...
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
...
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
// ... rest of the code ...
|
e8a0e7c3714445577851c5a84ecf7a036937725a
|
clang_corpus/__init__.py
|
clang_corpus/__init__.py
|
from os import listdir
from os.path import abspath, isfile, join, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
|
from os import listdir
from os.path import abspath, isfile, join, split, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def include_paths(self):
return [split(self._path)[0]]
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
|
Add an include_paths property to the SourceFile class.
|
Add an include_paths property to the SourceFile class.
|
Python
|
unlicense
|
jwiggins/clang-corpus,jwiggins/clang-corpus,jwiggins/clang-corpus
|
from os import listdir
- from os.path import abspath, isfile, join, splitext
+ from os.path import abspath, isfile, join, split, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
+
+ @property
+ def include_paths(self):
+ return [split(self._path)[0]]
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
|
Add an include_paths property to the SourceFile class.
|
## Code Before:
from os import listdir
from os.path import abspath, isfile, join, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
## Instruction:
Add an include_paths property to the SourceFile class.
## Code After:
from os import listdir
from os.path import abspath, isfile, join, split, splitext
# C, C++, Obj-C, & Obj-C++
SOURCE_EXTENSIONS = ('.h', '.hh', '.hpp', '.c', '.cpp', '.cxx', '.m', '.mm')
class SourceFile(object):
""" A simple object which wraps a text file.
"""
def __init__(self, path):
self._path = abspath(path)
@property
def path(self):
return self._path
@property
def include_paths(self):
return [split(self._path)[0]]
@property
def bytes(self):
with open(self._path, "rb") as fp:
bytes = fp.read()
return bytes
def create_package_modules(package_path, module_dict):
""" Populate a module dictionary with `SourceFile` objects for each source
file in a directory.
"""
package_path = abspath(package_path)
for filename in listdir(package_path):
file_path = join(package_path, filename)
key, ext = splitext(filename)
if isfile(file_path) and ext in SOURCE_EXTENSIONS:
key, ext = splitext(filename)
module_dict[key] = SourceFile(file_path)
|
...
from os import listdir
from os.path import abspath, isfile, join, split, splitext
...
return self._path
@property
def include_paths(self):
return [split(self._path)[0]]
...
|
8623aae8778307648e4a0380d84ca7dc7a63f3f2
|
oneflow/core/context_processors.py
|
oneflow/core/context_processors.py
|
from .models.nonrel import User
def mongodb_user(request):
if request.user.is_anonymous():
return {u'mongodb_user': None}
try:
mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id'])
except KeyError:
mongodb_user = User.objects.get(django_user=request.user.id)
# Cache it for next time.
request.session[u'mongodb_user_id'] = mongodb_user.id
return {u'mongodb_user': mongodb_user}
|
def mongodb_user(request):
""" not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
return {u'mongodb_user': request.user.mongo}
|
Simplify the context processor. Not very useful anymore, in fact.
|
Simplify the context processor. Not very useful anymore, in fact.
|
Python
|
agpl-3.0
|
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow
|
-
- from .models.nonrel import User
def mongodb_user(request):
+ """ not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
+ return {u'mongodb_user': request.user.mongo}
- try:
- mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id'])
- except KeyError:
- mongodb_user = User.objects.get(django_user=request.user.id)
-
- # Cache it for next time.
- request.session[u'mongodb_user_id'] = mongodb_user.id
-
- return {u'mongodb_user': mongodb_user}
-
|
Simplify the context processor. Not very useful anymore, in fact.
|
## Code Before:
from .models.nonrel import User
def mongodb_user(request):
if request.user.is_anonymous():
return {u'mongodb_user': None}
try:
mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id'])
except KeyError:
mongodb_user = User.objects.get(django_user=request.user.id)
# Cache it for next time.
request.session[u'mongodb_user_id'] = mongodb_user.id
return {u'mongodb_user': mongodb_user}
## Instruction:
Simplify the context processor. Not very useful anymore, in fact.
## Code After:
def mongodb_user(request):
""" not the most usefull context manager in the world. """
if request.user.is_anonymous():
return {u'mongodb_user': None}
return {u'mongodb_user': request.user.mongo}
|
# ... existing code ...
# ... modified code ...
def mongodb_user(request):
""" not the most usefull context manager in the world. """
...
return {u'mongodb_user': request.user.mongo}
# ... rest of the code ...
|
f0f66aa917d9ec85cfbe2a0460b2d4b4d5ffe0eb
|
middleware/hat_manager.py
|
middleware/hat_manager.py
|
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
|
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
def set_message(self, msg):
self.sense.show_message(msg, scroll_speed=0.05)
|
Add a method to print a message on the sense hat
|
Add a method to print a message on the sense hat
|
Python
|
mit
|
ylerjen/pir-hat,ylerjen/pir-hat,ylerjen/pir-hat
|
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
+ def set_message(self, msg):
+ self.sense.show_message(msg, scroll_speed=0.05)
+
|
Add a method to print a message on the sense hat
|
## Code Before:
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
## Instruction:
Add a method to print a message on the sense hat
## Code After:
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
def set_message(self, msg):
self.sense.show_message(msg, scroll_speed=0.05)
|
// ... existing code ...
return self._pressure
def set_message(self, msg):
self.sense.show_message(msg, scroll_speed=0.05)
// ... rest of the code ...
|
393bc0dc82524802c8d548216d4c51b4394e5394
|
tests.py
|
tests.py
|
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
assertEqual('test', 'test')
def second(self):
"""second test"""
assertEqual('2','2')
|
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
self.assertEqual('test', 'test')
def second(self):
"""second test"""
self.assertEqual('2','2')
|
Add self to test cases
|
Add self to test cases
Change-Id: Ib8a8fea97fb7390613a5521103f0f9d31615f262
|
Python
|
apache-2.0
|
khoser/mini_games
|
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
- assertEqual('test', 'test')
+ self.assertEqual('test', 'test')
def second(self):
"""second test"""
- assertEqual('2','2')
+ self.assertEqual('2','2')
|
Add self to test cases
|
## Code Before:
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
assertEqual('test', 'test')
def second(self):
"""second test"""
assertEqual('2','2')
## Instruction:
Add self to test cases
## Code After:
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
self.assertEqual('test', 'test')
def second(self):
"""second test"""
self.assertEqual('2','2')
|
...
def first(self):
self.assertEqual('test', 'test')
...
"""second test"""
self.assertEqual('2','2')
...
|
29e01ab226f5451e22ba3291e81bbaff13ce1867
|
greenmine/settings/__init__.py
|
greenmine/settings/__init__.py
|
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
|
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
Send more print message to sys.stderr
|
Smallfix: Send more print message to sys.stderr
|
Python
|
agpl-3.0
|
Zaneh-/bearded-tribble-back,astagi/taiga-back,astronaut1712/taiga-back,dayatz/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,crr0004/taiga-back,seanchen/taiga-back,coopsource/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,obimod/taiga-back,dycodedev/taiga-back,Tigerwhit4/taiga-back,19kestier/taiga-back,gauravjns/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,obimod/taiga-back,forging2012/taiga-back,WALR/taiga-back,CoolCloud/taiga-back,coopsource/taiga-back,dayatz/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,dycodedev/taiga-back,19kestier/taiga-back,gam-phon/taiga-back,obimod/taiga-back,rajiteh/taiga-back,coopsource/taiga-back,crr0004/taiga-back,Tigerwhit4/taiga-back,EvgeneOskin/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,WALR/taiga-back,dayatz/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,CMLL/taiga-back,forging2012/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,Tigerwhit4/taiga-back,seanchen/taiga-back,forging2012/taiga-back,gauravjns/taiga-back,gauravjns/taiga-back,CoolCloud/taiga-back,Zaneh-/bearded-tribble-back,Tigerwhit4/taiga-back,jeffdwyatt/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,seanchen/taiga-back,forging2012/taiga-back,xdevelsistemas/taiga-back-community,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,Rademade/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,astagi/taiga-back,joshisa/taiga-back,Zaneh-/bearded-tribble-back,xdevelsistemas/taiga-back-community,19kestier/taiga-back,taigaio/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,astronaut1712/taiga-back,joshisa/taiga-back,rajiteh/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,CMLL/taiga-back,astagi/taiga-back,WALR/taiga-back,EvgeneOskin/taiga-back,astronaut1712/taiga-back,Rademade/taiga-back,obimod/taiga-back,xdevelsistemas/taiga-back-community,dycodedev/taiga-back,gam-ph
on/taiga-back,bdang2012/taiga-back-casting,frt-arch/taiga-back,joshisa/taiga-back,Rademade/taiga-back
|
-
- from __future__ import absolute_import
+ from __future__ import (
+ absolute_import,
+ print_function
+ )
- import os
+ import os, sys
try:
- print "Trying import local.py settings..."
+ print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
- print "Trying import development.py settings..."
+ print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
Send more print message to sys.stderr
|
## Code Before:
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
## Instruction:
Send more print message to sys.stderr
## Code After:
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
// ... existing code ...
from __future__ import (
absolute_import,
print_function
)
import os, sys
// ... modified code ...
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
...
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
// ... rest of the code ...
|
96bb2ba0dc6e58195b598e03d177114becfeba7a
|
nxpy/util.py
|
nxpy/util.py
|
import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
Remove new_ele() and sub_ele() functions
|
Remove new_ele() and sub_ele() functions
|
Python
|
apache-2.0
|
Kent1/nxpy
|
import re
- from lxml import etree
-
-
- def new_ele(tag, attrs={}, **extra):
- etree.Element(tag, attrs, **extra)
-
-
- def sub_ele(parent, tag, attrs={}, **extra):
- etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
Remove new_ele() and sub_ele() functions
|
## Code Before:
import re
from lxml import etree
def new_ele(tag, attrs={}, **extra):
etree.Element(tag, attrs, **extra)
def sub_ele(parent, tag, attrs={}, **extra):
etree.SubElement(parent, tag, attrs, **extra)
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
## Instruction:
Remove new_ele() and sub_ele() functions
## Code After:
import re
# Globals
tag_pattern = re.compile(r'({.*})?(.*)')
whitespace_pattern = re.compile(r'[\n\r\s]+')
|
# ... existing code ...
import re
# ... rest of the code ...
|
0d3507d5eb9801b38c66fb03804e15b089fb2233
|
tests/cdek_test.py
|
tests/cdek_test.py
|
from __future__ import unicode_literals, print_function
import datetime
import unittest
import mock
from cdek import api, exceptions
@mock.patch('cdek.api.urlopen')
class CdekApiTest(unittest.TestCase):
def setUp(self):
self.reg_user = api.CdekCalc('123456', '123456')
self.unreg_user = api.CdekCalc()
def test_valid_get_secure(self, urlopen):
self.assertEqual(self.reg_user._get_secure(
datetime.datetime.today().date(), self.reg_user.auth_password),
'c5750d7c97a89aa33b8e030d4c7a4847')
self.assertNotEqual(self.reg_user._get_secure(
datetime.datetime.today().date() - datetime.timedelta(days=2),
self.reg_user.auth_password), 'c5750d7c97a89aa33b8e030d4c7a4847')
|
from __future__ import unicode_literals, print_function
import datetime
import unittest
import mock
from cdek import api, exceptions
@mock.patch('cdek.api.urlopen')
class CdekApiTest(unittest.TestCase):
def setUp(self):
self.reg_user = api.CdekCalc('123456', '123456')
self.unreg_user = api.CdekCalc()
def test_valid_get_secure(self, urlopen):
self.assertEqual(self.reg_user._get_secure(
datetime.datetime.strptime('2013-05-30', '%Y-%m-%d'), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
self.assertNotEqual(self.reg_user._get_secure(
datetime.datetime.today().date(), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
|
Fix date for generate secure_key
|
Fix date for generate secure_key
|
Python
|
mit
|
xtelaur/python-cdek,xtelaur/python-cdek
|
from __future__ import unicode_literals, print_function
import datetime
import unittest
import mock
from cdek import api, exceptions
@mock.patch('cdek.api.urlopen')
class CdekApiTest(unittest.TestCase):
def setUp(self):
self.reg_user = api.CdekCalc('123456', '123456')
self.unreg_user = api.CdekCalc()
def test_valid_get_secure(self, urlopen):
self.assertEqual(self.reg_user._get_secure(
- datetime.datetime.today().date(), self.reg_user.auth_password),
+ datetime.datetime.strptime('2013-05-30', '%Y-%m-%d'), self.reg_user.auth_password),
- 'c5750d7c97a89aa33b8e030d4c7a4847')
+ '21de6125dbaac7adf68007c6bcf9ac98')
self.assertNotEqual(self.reg_user._get_secure(
- datetime.datetime.today().date() - datetime.timedelta(days=2),
- self.reg_user.auth_password), 'c5750d7c97a89aa33b8e030d4c7a4847')
+ datetime.datetime.today().date(), self.reg_user.auth_password),
+ '21de6125dbaac7adf68007c6bcf9ac98')
|
Fix date for generate secure_key
|
## Code Before:
from __future__ import unicode_literals, print_function
import datetime
import unittest
import mock
from cdek import api, exceptions
@mock.patch('cdek.api.urlopen')
class CdekApiTest(unittest.TestCase):
def setUp(self):
self.reg_user = api.CdekCalc('123456', '123456')
self.unreg_user = api.CdekCalc()
def test_valid_get_secure(self, urlopen):
self.assertEqual(self.reg_user._get_secure(
datetime.datetime.today().date(), self.reg_user.auth_password),
'c5750d7c97a89aa33b8e030d4c7a4847')
self.assertNotEqual(self.reg_user._get_secure(
datetime.datetime.today().date() - datetime.timedelta(days=2),
self.reg_user.auth_password), 'c5750d7c97a89aa33b8e030d4c7a4847')
## Instruction:
Fix date for generate secure_key
## Code After:
from __future__ import unicode_literals, print_function
import datetime
import unittest
import mock
from cdek import api, exceptions
@mock.patch('cdek.api.urlopen')
class CdekApiTest(unittest.TestCase):
def setUp(self):
self.reg_user = api.CdekCalc('123456', '123456')
self.unreg_user = api.CdekCalc()
def test_valid_get_secure(self, urlopen):
self.assertEqual(self.reg_user._get_secure(
datetime.datetime.strptime('2013-05-30', '%Y-%m-%d'), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
self.assertNotEqual(self.reg_user._get_secure(
datetime.datetime.today().date(), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
|
...
self.assertEqual(self.reg_user._get_secure(
datetime.datetime.strptime('2013-05-30', '%Y-%m-%d'), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
...
self.assertNotEqual(self.reg_user._get_secure(
datetime.datetime.today().date(), self.reg_user.auth_password),
'21de6125dbaac7adf68007c6bcf9ac98')
...
|
f41f9f9e562c6850d70ee17976c9dbb4aa3cca5f
|
pseudodata.py
|
pseudodata.py
|
class PseudoData(dict):
def __init__(self, name_func_dict, sweep):
super(PseudoData, self).__init__()
self.name_func_dict = name_func_dict
self.sweep = sweep
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
elif key in self.name_func_dict:
func = self.name_func_dict[key]['func']
pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta)
self.__setitem__(key, pcol)
return pcol
else:
return dict.__getitem__(self, key)
def get_names(self):
names = [k for k, v in self.name_func_dict.items() if 'func' in v]
names.sort()
return names
def get_labels(self):
labels = [key['label'] for key in self.name_func_dict.keys()]
return labels
# class PseudoFunction(object):
# def __init__(self, name, label, active):
# self.name = name
# self.label = label
# self.active = active
# def __call__(self):
# return 10
|
class PseudoData(dict):
def __init__(self, name_func_dict, sweep):
super(PseudoData, self).__init__()
self.name_func_dict = name_func_dict
self.sweep = sweep
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
elif key in self.name_func_dict:
func = self.name_func_dict[key]['func']
pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta)
self.__setitem__(key, pcol)
return pcol
else:
return dict.__getitem__(self, key)
def get_names(self):
names = [k for k, v in self.name_func_dict.items() if 'func' in v]
names.sort()
return names
|
Remove unused get_labels function and commented code
|
Remove unused get_labels function and commented code
|
Python
|
mit
|
mchels/FolderBrowser
|
class PseudoData(dict):
def __init__(self, name_func_dict, sweep):
super(PseudoData, self).__init__()
self.name_func_dict = name_func_dict
self.sweep = sweep
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
elif key in self.name_func_dict:
func = self.name_func_dict[key]['func']
pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta)
self.__setitem__(key, pcol)
return pcol
else:
return dict.__getitem__(self, key)
def get_names(self):
names = [k for k, v in self.name_func_dict.items() if 'func' in v]
names.sort()
return names
- def get_labels(self):
- labels = [key['label'] for key in self.name_func_dict.keys()]
- return labels
-
-
- # class PseudoFunction(object):
- # def __init__(self, name, label, active):
- # self.name = name
- # self.label = label
- # self.active = active
-
- # def __call__(self):
- # return 10
-
-
|
Remove unused get_labels function and commented code
|
## Code Before:
class PseudoData(dict):
def __init__(self, name_func_dict, sweep):
super(PseudoData, self).__init__()
self.name_func_dict = name_func_dict
self.sweep = sweep
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
elif key in self.name_func_dict:
func = self.name_func_dict[key]['func']
pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta)
self.__setitem__(key, pcol)
return pcol
else:
return dict.__getitem__(self, key)
def get_names(self):
names = [k for k, v in self.name_func_dict.items() if 'func' in v]
names.sort()
return names
def get_labels(self):
labels = [key['label'] for key in self.name_func_dict.keys()]
return labels
# class PseudoFunction(object):
# def __init__(self, name, label, active):
# self.name = name
# self.label = label
# self.active = active
# def __call__(self):
# return 10
## Instruction:
Remove unused get_labels function and commented code
## Code After:
class PseudoData(dict):
def __init__(self, name_func_dict, sweep):
super(PseudoData, self).__init__()
self.name_func_dict = name_func_dict
self.sweep = sweep
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
elif key in self.name_func_dict:
func = self.name_func_dict[key]['func']
pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta)
self.__setitem__(key, pcol)
return pcol
else:
return dict.__getitem__(self, key)
def get_names(self):
names = [k for k, v in self.name_func_dict.items() if 'func' in v]
names.sort()
return names
|
# ... existing code ...
return names
# ... rest of the code ...
|
a3d58cc1feeca734898098920e5c7195632d408b
|
atompos/atompos/middleware/logging_middleware.py
|
atompos/atompos/middleware/logging_middleware.py
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
Fix for request timer not always working.
|
Fix for request timer not always working.
|
Python
|
mit
|
jimivdw/OAPoC,bertrand-caron/OAPoC,bertrand-caron/OAPoC
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
+ if not hasattr(request, 'timer'):
+ request.timer = time()
+
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
Fix for request timer not always working.
|
## Code Before:
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
## Instruction:
Fix for request timer not always working.
## Code After:
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
// ... existing code ...
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
// ... rest of the code ...
|
66cc9d8c6f91378fadbbc3e40fe4397e43b7b757
|
mopidy/frontends/mpd/__init__.py
|
mopidy/frontends/mpd/__init__.py
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
Allow reply_to to not be set in messages to the MPD frontend
|
Allow reply_to to not be set in messages to the MPD frontend
|
Python
|
apache-2.0
|
vrs01/mopidy,dbrgn/mopidy,bencevans/mopidy,swak/mopidy,diandiankan/mopidy,tkem/mopidy,glogiotatidis/mopidy,priestd09/mopidy,dbrgn/mopidy,abarisain/mopidy,vrs01/mopidy,swak/mopidy,jmarsik/mopidy,ali/mopidy,vrs01/mopidy,priestd09/mopidy,SuperStarPL/mopidy,mopidy/mopidy,liamw9534/mopidy,pacificIT/mopidy,bencevans/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,adamcik/mopidy,bacontext/mopidy,jodal/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,swak/mopidy,jcass77/mopidy,dbrgn/mopidy,jodal/mopidy,jmarsik/mopidy,mopidy/mopidy,dbrgn/mopidy,mokieyue/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,priestd09/mopidy,bacontext/mopidy,ZenithDK/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,quartz55/mopidy,quartz55/mopidy,ali/mopidy,glogiotatidis/mopidy,hkariti/mopidy,rawdlite/mopidy,kingosticks/mopidy,rawdlite/mopidy,pacificIT/mopidy,ali/mopidy,tkem/mopidy,tkem/mopidy,ZenithDK/mopidy,kingosticks/mopidy,rawdlite/mopidy,hkariti/mopidy,quartz55/mopidy,bacontext/mopidy,ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,adamcik/mopidy,mokieyue/mopidy,jcass77/mopidy,jmarsik/mopidy,ZenithDK/mopidy,liamw9534/mopidy,jcass77/mopidy,bencevans/mopidy,tkem/mopidy,swak/mopidy,hkariti/mopidy,ali/mopidy,pacificIT/mopidy,diandiankan/mopidy,vrs01/mopidy,jodal/mopidy,mokieyue/mopidy,abarisain/mopidy,woutervanwijk/mopidy,quartz55/mopidy,bencevans/mopidy,jmarsik/mopidy,hkariti/mopidy,mopidy/mopidy
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
+ if 'reply_to' in message:
- connection = unpickle_connection(message['reply_to'])
+ connection = unpickle_connection(message['reply_to'])
- connection.send(response)
+ connection.send(response)
else:
pass # Ignore messages for other frontends
|
Allow reply_to to not be set in messages to the MPD frontend
|
## Code Before:
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
## Instruction:
Allow reply_to to not be set in messages to the MPD frontend
## Code After:
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
# ... existing code ...
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
# ... rest of the code ...
|
8c5f317a090a23f10adcc837645bd25a8b5626f8
|
shap/models/_model.py
|
shap/models/_model.py
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
Check SHAP Model call type
|
Check SHAP Model call type
|
Python
|
mit
|
slundberg/shap,slundberg/shap,slundberg/shap,slundberg/shap
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
+ from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
- return np.array(self.inner_model(*args))
+ out = self.inner_model(*args)
+ out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
+ return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
Check SHAP Model call type
|
## Code Before:
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
## Instruction:
Check SHAP Model call type
## Code After:
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
...
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
...
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
...
|
14d170eece4e8bb105f5316fb0c6e672a3253b08
|
py3flowtools/flowtools_wrapper.py
|
py3flowtools/flowtools_wrapper.py
|
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOW_EXPORT_ARGS = [
'flow-export',
'-f', '2',
]
def FlowToolsLog(file_path):
with io.open(file_path, mode='rb') as flow_fd, \
io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
FLOW_EXPORT_ARGS,
stdin=flow_fd,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
|
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOW_EXPORT_ARGS = [
'flow-export',
'-f', '2',
]
class FlowToolsLog(object):
def __init__(self, file_path):
self._file_path = file_path
def __iter__(self):
self._parser = self._reader()
return self
def __next__(self):
return next(self._parser)
def next(self):
"""
next method included for compatibility with Python 2
"""
return self.__next__()
def _reader(self):
with io.open(self._file_path, mode='rb') as flow_fd, \
io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
FLOW_EXPORT_ARGS,
stdin=flow_fd,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(
self._file_path
)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
|
Convert FlowToolsLog to a class
|
Convert FlowToolsLog to a class
|
Python
|
mit
|
bbayles/py3flowtools
|
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOW_EXPORT_ARGS = [
'flow-export',
'-f', '2',
]
+ class FlowToolsLog(object):
+ def __init__(self, file_path):
+ self._file_path = file_path
- def FlowToolsLog(file_path):
- with io.open(file_path, mode='rb') as flow_fd, \
- io.open(os.devnull, mode='wb') as DEVNULL:
- with subprocess.Popen(
- FLOW_EXPORT_ARGS,
- stdin=flow_fd,
- stdout=subprocess.PIPE,
- stderr=DEVNULL
- ) as proc:
- iterator = iter(proc.stdout.readline, b'')
- try:
- next(iterator)
- except StopIteration:
- msg = 'Could not extract data from {}'.format(file_path)
- raise IOError(msg)
- for line in iterator:
- parsed_line = FlowLine(line)
- yield parsed_line
+ def __iter__(self):
+ self._parser = self._reader()
+ return self
+
+ def __next__(self):
+ return next(self._parser)
+
+ def next(self):
+ """
+ next method included for compatibility with Python 2
+ """
+ return self.__next__()
+
+ def _reader(self):
+ with io.open(self._file_path, mode='rb') as flow_fd, \
+ io.open(os.devnull, mode='wb') as DEVNULL:
+ with subprocess.Popen(
+ FLOW_EXPORT_ARGS,
+ stdin=flow_fd,
+ stdout=subprocess.PIPE,
+ stderr=DEVNULL
+ ) as proc:
+ iterator = iter(proc.stdout.readline, b'')
+ try:
+ next(iterator)
+ except StopIteration:
+ msg = 'Could not extract data from {}'.format(
+ self._file_path
+ )
+ raise IOError(msg)
+ for line in iterator:
+ parsed_line = FlowLine(line)
+ yield parsed_line
+
|
Convert FlowToolsLog to a class
|
## Code Before:
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOW_EXPORT_ARGS = [
'flow-export',
'-f', '2',
]
def FlowToolsLog(file_path):
with io.open(file_path, mode='rb') as flow_fd, \
io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
FLOW_EXPORT_ARGS,
stdin=flow_fd,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
## Instruction:
Convert FlowToolsLog to a class
## Code After:
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOW_EXPORT_ARGS = [
'flow-export',
'-f', '2',
]
class FlowToolsLog(object):
def __init__(self, file_path):
self._file_path = file_path
def __iter__(self):
self._parser = self._reader()
return self
def __next__(self):
return next(self._parser)
def next(self):
"""
next method included for compatibility with Python 2
"""
return self.__next__()
def _reader(self):
with io.open(self._file_path, mode='rb') as flow_fd, \
io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
FLOW_EXPORT_ARGS,
stdin=flow_fd,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(
self._file_path
)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
|
// ... existing code ...
class FlowToolsLog(object):
def __init__(self, file_path):
self._file_path = file_path
def __iter__(self):
self._parser = self._reader()
return self
def __next__(self):
return next(self._parser)
def next(self):
"""
next method included for compatibility with Python 2
"""
return self.__next__()
def _reader(self):
with io.open(self._file_path, mode='rb') as flow_fd, \
io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
FLOW_EXPORT_ARGS,
stdin=flow_fd,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(
self._file_path
)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
// ... rest of the code ...
|
9f005120c6d408e8cf3097dd74d5dada24305c88
|
src/jsonlogger.py
|
src/jsonlogger.py
|
import logging
import json
import re
from datetime import datetime
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
mappings = {
'asctime': create_timestamp,
'message': lambda r: r.msg,
}
formatters = self.parse()
log_record = {}
for formatter in formatters:
try:
log_record[formatter] = mappings[formatter](record)
except KeyError:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
def create_timestamp(record):
"""Creates a human readable timestamp for a log records created date"""
timestamp = datetime.fromtimestamp(record.created)
return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
|
import logging
import json
import re
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
formatters = self.parse()
record.message = record.getMessage()
# only format time if needed
if "asctime" in formatters:
record.asctime = self.formatTime(record, self.datefmt)
log_record = {}
for formatter in formatters:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
|
Use the same logic to format message and asctime than the standard library.
|
Use the same logic to format message and asctime than the standard library.
This way we producte better message text on some circumstances when not logging
a string and use the date formater from the base class that uses the date format
configured from a file or a dict.
|
Python
|
bsd-2-clause
|
madzak/python-json-logger,bbc/python-json-logger
|
import logging
import json
import re
- from datetime import datetime
+
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
- mappings = {
- 'asctime': create_timestamp,
- 'message': lambda r: r.msg,
- }
formatters = self.parse()
+ record.message = record.getMessage()
+ # only format time if needed
+ if "asctime" in formatters:
+ record.asctime = self.formatTime(record, self.datefmt)
+
log_record = {}
for formatter in formatters:
- try:
- log_record[formatter] = mappings[formatter](record)
- except KeyError:
- log_record[formatter] = record.__dict__[formatter]
+ log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
- def create_timestamp(record):
- """Creates a human readable timestamp for a log records created date"""
-
- timestamp = datetime.fromtimestamp(record.created)
- return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
-
|
Use the same logic to format message and asctime than the standard library.
|
## Code Before:
import logging
import json
import re
from datetime import datetime
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
mappings = {
'asctime': create_timestamp,
'message': lambda r: r.msg,
}
formatters = self.parse()
log_record = {}
for formatter in formatters:
try:
log_record[formatter] = mappings[formatter](record)
except KeyError:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
def create_timestamp(record):
"""Creates a human readable timestamp for a log records created date"""
timestamp = datetime.fromtimestamp(record.created)
return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
## Instruction:
Use the same logic to format message and asctime than the standard library.
## Code After:
import logging
import json
import re
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
formatters = self.parse()
record.message = record.getMessage()
# only format time if needed
if "asctime" in formatters:
record.asctime = self.formatTime(record, self.datefmt)
log_record = {}
for formatter in formatters:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
|
...
import re
...
"""Formats a log record and serializes to json"""
...
record.message = record.getMessage()
# only format time if needed
if "asctime" in formatters:
record.asctime = self.formatTime(record, self.datefmt)
log_record = {}
...
for formatter in formatters:
log_record[formatter] = record.__dict__[formatter]
...
return json.dumps(log_record)
...
|
b39db786b73cc00676d35cd14b42c70d63b21ba3
|
readthedocs/projects/templatetags/projects_tags.py
|
readthedocs/projects/templatetags/projects_tags.py
|
from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
Fix version sorting to make latest and stable first.
|
Fix version sorting to make latest and stable first.
|
Python
|
mit
|
CedarLogic/readthedocs.org,GovReady/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,wanghaven/readthedocs.org,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,wanghaven/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,takluyver/readthedocs.org,pombredanne/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,takluyver/readthedocs.org,agjohnson/readthedocs.org,royalwang/readthedocs.org,istresearch/readthedocs.org,soulshake/readthedocs.org,Tazer/readthedocs.org,d0ugal/readthedocs.org,techtonik/readthedocs.org,LukasBoersma/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,stevepiercy/readthedocs.org,wijerasa/readthedocs.org,titiushko/readthedocs.org,emawind84/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,sid-kap/readthedocs.org,SteveViss/readthedocs.org,sid-kap/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,kdkeyser/readthedocs.org,sunnyzwh/readthedocs.org,tddv/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,asampat3090/readthedocs.org,nikolas/readthedocs.org,jerel/readthedocs.org,safwanrahman/readthedocs.org,emawind84/readthedocs.org,safwanrahman/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,GovReady/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,KamranMackey/readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,atsuyim/readthed
ocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,pombredanne/readthedocs.org,laplaceliu/readthedocs.org,cgourlay/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,wanghaven/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,mrshoki/readthedocs.org,rtfd/readthedocs.org,agjohnson/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,gjtorikian/readthedocs.org,Tazer/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,VishvajitP/readthedocs.org,Carreau/readthedocs.org,kdkeyser/readthedocs.org,titiushko/readthedocs.org,fujita-shintaro/readthedocs.org,kenshinthebattosai/readthedocs.org,LukasBoersma/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,raven47git/readthedocs.org,GovReady/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,KamranMackey/readthedocs.org,singingwolfboy/readthedocs.org,sils1297/readthedocs.org,CedarLogic/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedocs.org,michaelmcandrew/readthedocs.org,tddv/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,atsuyim/readthedocs.org,wijerasa/readthedocs.org,espdev/readthedocs.org,dirn/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,michaelmcandrew/readthedocs.org,raven47git/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,sid-kap/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,mrshoki/readthedocs.org,techtonik/readthedocs.org,kenwang76/readthedocs.org,fujit
a-shintaro/readthedocs.org,laplaceliu/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,kdkeyser/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,sils1297/readthedocs.org,mhils/readthedocs.org,cgourlay/readthedocs.org,asampat3090/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,raven47git/readthedocs.org,d0ugal/readthedocs.org,wijerasa/readthedocs.org,nikolas/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,nikolas/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,cgourlay/readthedocs.org,Tazer/readthedocs.org,Carreau/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,Carreau/readthedocs.org,davidfischer/readthedocs.org,espdev/readthedocs.org
|
from django import template
+ from distutils2.version import NormalizedVersion
+ from projects.utils import mkversion
+
register = template.Library()
+
+
+ def make_version(version):
+ ver = mkversion(version)
+ if not ver:
+ if version.slug == 'latest':
+ return NormalizedVersion('99999.0', error_on_huge_major_num=False)
+ elif version.slug == 'stable':
+ return NormalizedVersion('9999.0', error_on_huge_major_num=False)
+ else:
+ return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
- from distutils2.version import NormalizedVersion
- from projects.utils import mkversion
- fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
+ sorted_verisons = sorted(versions,
+ key=make_version,
+ reverse=True)
- return sorted(versions,
+ return sorted_verisons
- key=lambda v: (mkversion(v) or fallback),
- reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
Fix version sorting to make latest and stable first.
|
## Code Before:
from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
## Instruction:
Fix version sorting to make latest and stable first.
## Code After:
from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
# ... existing code ...
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
# ... modified code ...
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
# ... rest of the code ...
|
37715104dec586ea67b253e4e7ed35795cb5ea8c
|
track.py
|
track.py
|
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
|
from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
Add google measurement protocol back
|
Add google measurement protocol back
|
Python
|
mit
|
reneepadgham/diverseui,reneepadgham/diverseui,reneepadgham/diverseui
|
+ from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
- # event = Event('API', 'Fetch', label=label, value=count)
+ data = event('API', 'Fetch', label=label, value=count)
- # report('UA-68765997-3', client_id, event)
+ report('UA-68765997-3', client_id, data)
|
Add google measurement protocol back
|
## Code Before:
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
## Instruction:
Add google measurement protocol back
## Code After:
from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
// ... existing code ...
from google_measurement_protocol import event, report
import uuid
// ... modified code ...
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
// ... rest of the code ...
|
540bffe17ede75bc6afd9b2d45e343e0eac4552b
|
rna-transcription/rna_transcription.py
|
rna-transcription/rna_transcription.py
|
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
Add an exception based version
|
Add an exception based version
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
- DNA = {"A", "C", "T", "G"}
-
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
+ try:
+ return "".join([TRANS[n] for n in dna])
+ except KeyError:
+ return ""
+
+
+ # Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
+
+ DNA = {"A", "C", "T", "G"}
+ TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
+
+
+ def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
Add an exception based version
|
## Code Before:
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
## Instruction:
Add an exception based version
## Code After:
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
# ... existing code ...
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
# ... modified code ...
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
# ... rest of the code ...
|
a2f1cdc05e63b7b68c16f3fd1e5203608888b059
|
traits/util/deprecated.py
|
traits/util/deprecated.py
|
""" A decorator for marking methods/functions as deprecated. """
# Standard library imports.
import logging
# We only warn about each function or method once!
_cache = {}
def deprecated(message):
""" A factory for decorators for marking methods/functions as deprecated.
"""
def decorator(fn):
""" A decorator for marking methods/functions as deprecated. """
def wrapper(*args, **kw):
""" The method/function wrapper. """
global _cache
module_name = fn.__module__
function_name = fn.__name__
if (module_name, function_name) not in _cache:
logging.getLogger(module_name).warning(
'DEPRECATED: %s.%s, %s' % (
module_name, function_name, message
)
)
_cache[(module_name, function_name)] = True
return fn(*args, **kw)
wrapper.__doc__ = fn.__doc__
wrapper.__name__ = fn.__name__
return wrapper
return decorator
#### EOF ######################################################################
|
""" A decorator for marking methods/functions as deprecated. """
# Standard library imports.
import functools
import warnings
def deprecated(message):
""" A factory for decorators for marking methods/functions as deprecated.
"""
def decorator(fn):
""" A decorator for marking methods/functions as deprecated. """
@functools.wraps(fn)
def wrapper(*args, **kw):
""" The method/function wrapper. """
warnings.warn(message, DeprecationWarning, stacklevel=2)
return fn(*args, **kw)
return wrapper
return decorator
|
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
|
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
+
""" A decorator for marking methods/functions as deprecated. """
-
# Standard library imports.
+ import functools
+ import warnings
- import logging
-
- # We only warn about each function or method once!
- _cache = {}
def deprecated(message):
""" A factory for decorators for marking methods/functions as deprecated.
"""
-
def decorator(fn):
""" A decorator for marking methods/functions as deprecated. """
+ @functools.wraps(fn)
def wrapper(*args, **kw):
""" The method/function wrapper. """
+ warnings.warn(message, DeprecationWarning, stacklevel=2)
- global _cache
-
- module_name = fn.__module__
- function_name = fn.__name__
-
- if (module_name, function_name) not in _cache:
- logging.getLogger(module_name).warning(
- 'DEPRECATED: %s.%s, %s' % (
- module_name, function_name, message
- )
- )
-
- _cache[(module_name, function_name)] = True
-
return fn(*args, **kw)
-
- wrapper.__doc__ = fn.__doc__
- wrapper.__name__ = fn.__name__
return wrapper
return decorator
- #### EOF ######################################################################
-
|
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
|
## Code Before:
""" A decorator for marking methods/functions as deprecated. """
# Standard library imports.
import logging
# We only warn about each function or method once!
_cache = {}
def deprecated(message):
""" A factory for decorators for marking methods/functions as deprecated.
"""
def decorator(fn):
""" A decorator for marking methods/functions as deprecated. """
def wrapper(*args, **kw):
""" The method/function wrapper. """
global _cache
module_name = fn.__module__
function_name = fn.__name__
if (module_name, function_name) not in _cache:
logging.getLogger(module_name).warning(
'DEPRECATED: %s.%s, %s' % (
module_name, function_name, message
)
)
_cache[(module_name, function_name)] = True
return fn(*args, **kw)
wrapper.__doc__ = fn.__doc__
wrapper.__name__ = fn.__name__
return wrapper
return decorator
#### EOF ######################################################################
## Instruction:
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
## Code After:
""" A decorator for marking methods/functions as deprecated. """
# Standard library imports.
import functools
import warnings
def deprecated(message):
""" A factory for decorators for marking methods/functions as deprecated.
"""
def decorator(fn):
""" A decorator for marking methods/functions as deprecated. """
@functools.wraps(fn)
def wrapper(*args, **kw):
""" The method/function wrapper. """
warnings.warn(message, DeprecationWarning, stacklevel=2)
return fn(*args, **kw)
return wrapper
return decorator
|
...
""" A decorator for marking methods/functions as deprecated. """
...
# Standard library imports.
import functools
import warnings
...
"""
def decorator(fn):
...
@functools.wraps(fn)
def wrapper(*args, **kw):
...
warnings.warn(message, DeprecationWarning, stacklevel=2)
return fn(*args, **kw)
...
return decorator
...
|
1f4bd95d758db4e2388b180f637963e26a033790
|
InvenTree/part/migrations/0034_auto_20200404_1238.py
|
InvenTree/part/migrations/0034_auto_20200404_1238.py
|
from django.db import migrations
from django.db.utils import OperationalError, ProgrammingError
from part.models import Part
from stdimage.utils import render_variations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
"""
try:
for part in Part.objects.all():
# Render thumbnail for each existing Part
if part.image:
try:
part.image.render_variations()
except FileNotFoundError:
print("Missing image:", part.image())
# The image is missing, so clear the field
part.image = None
part.save()
except (OperationalError, ProgrammingError):
# Migrations have not yet been applied - table does not exist
print("Could not generate Part thumbnails")
class Migration(migrations.Migration):
dependencies = [
('part', '0033_auto_20200404_0445'),
]
operations = [
migrations.RunPython(create_thumbnails),
]
|
from django.db import migrations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
Note: This functionality is now performed in apps.py,
as running the thumbnail script here caused too many database level errors.
This migration is left here to maintain the database migration history
"""
pass
class Migration(migrations.Migration):
dependencies = [
('part', '0033_auto_20200404_0445'),
]
operations = [
migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
]
|
Remove the problematic migration entirely
|
Remove the problematic migration entirely
- The thumbnail check code is run every time the server is started anyway!
|
Python
|
mit
|
inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree
|
from django.db import migrations
- from django.db.utils import OperationalError, ProgrammingError
-
- from part.models import Part
- from stdimage.utils import render_variations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
+
+ Note: This functionality is now performed in apps.py,
+ as running the thumbnail script here caused too many database level errors.
+
+ This migration is left here to maintain the database migration history
+
"""
+ pass
- try:
- for part in Part.objects.all():
- # Render thumbnail for each existing Part
- if part.image:
- try:
- part.image.render_variations()
- except FileNotFoundError:
- print("Missing image:", part.image())
- # The image is missing, so clear the field
- part.image = None
- part.save()
-
- except (OperationalError, ProgrammingError):
- # Migrations have not yet been applied - table does not exist
- print("Could not generate Part thumbnails")
class Migration(migrations.Migration):
dependencies = [
('part', '0033_auto_20200404_0445'),
]
operations = [
- migrations.RunPython(create_thumbnails),
+ migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
]
|
Remove the problematic migration entirely
|
## Code Before:
from django.db import migrations
from django.db.utils import OperationalError, ProgrammingError
from part.models import Part
from stdimage.utils import render_variations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
"""
try:
for part in Part.objects.all():
# Render thumbnail for each existing Part
if part.image:
try:
part.image.render_variations()
except FileNotFoundError:
print("Missing image:", part.image())
# The image is missing, so clear the field
part.image = None
part.save()
except (OperationalError, ProgrammingError):
# Migrations have not yet been applied - table does not exist
print("Could not generate Part thumbnails")
class Migration(migrations.Migration):
dependencies = [
('part', '0033_auto_20200404_0445'),
]
operations = [
migrations.RunPython(create_thumbnails),
]
## Instruction:
Remove the problematic migration entirely
## Code After:
from django.db import migrations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
Note: This functionality is now performed in apps.py,
as running the thumbnail script here caused too many database level errors.
This migration is left here to maintain the database migration history
"""
pass
class Migration(migrations.Migration):
dependencies = [
('part', '0033_auto_20200404_0445'),
]
operations = [
migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
]
|
...
from django.db import migrations
...
Create thumbnails for all existing Part images.
Note: This functionality is now performed in apps.py,
as running the thumbnail script here caused too many database level errors.
This migration is left here to maintain the database migration history
"""
pass
...
operations = [
migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
]
...
|
b51e0ff9407f8a609be580d8fcb9cad6cfd267d8
|
setup.py
|
setup.py
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.1'
package_data = {
'render_as': [
'templates/avoid_clash_with_real_app/*.html',
'templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.2'
package_data = {
'render_as': [
'test_templates/avoid_clash_with_real_app/*.html',
'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Include test templates in distributions.
|
Include test templates in distributions.
This probably wasn't working before, although apparently I didn't
notice. But then no one really runs tests for their 3PA, do they?
This is v1.2.
|
Python
|
mit
|
jaylett/django-render-as,jaylett/django-render-as
|
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
- VERSION = '1.1'
+ VERSION = '1.2'
package_data = {
'render_as': [
- 'templates/avoid_clash_with_real_app/*.html',
+ 'test_templates/avoid_clash_with_real_app/*.html',
- 'templates/render_as/*.html',
+ 'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Include test templates in distributions.
|
## Code Before:
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.1'
package_data = {
'render_as': [
'templates/avoid_clash_with_real_app/*.html',
'templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
## Instruction:
Include test templates in distributions.
## Code After:
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
PACKAGE = 'django-render-as'
VERSION = '1.2'
package_data = {
'render_as': [
'test_templates/avoid_clash_with_real_app/*.html',
'test_templates/render_as/*.html',
],
}
setup(
name=PACKAGE, version=VERSION,
description="Template rendering indirector based on object class",
packages=[
'render_as',
'render_as/templatetags',
],
package_data=package_data,
license='MIT',
author='James Aylett',
author_email='[email protected]',
install_requires=[
'Django~=1.10',
],
classifiers=[
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
# ... existing code ...
PACKAGE = 'django-render-as'
VERSION = '1.2'
# ... modified code ...
'render_as': [
'test_templates/avoid_clash_with_real_app/*.html',
'test_templates/render_as/*.html',
],
# ... rest of the code ...
|
d4b487ed1b276be230440e60ab3cdc81e73cff47
|
tests/unit/utils/test_utils.py
|
tests/unit/utils/test_utils.py
|
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
|
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
Add unit test to get opts from the environment
|
Add unit test to get opts from the environment
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
+ def test_get_module_environment_opts(self):
+ '''
+ Test for salt.utils.get_module_environment
+
+ :return:
+ '''
+ expectation = {'message': 'Melting hard drives'}
+ _globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
+ '__file__': '/daemons/loose/in/system.py'}
+ assert salt.utils.get_module_environment(_globals) == expectation
+
|
Add unit test to get opts from the environment
|
## Code Before:
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
## Instruction:
Add unit test to get opts from the environment
## Code After:
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
// ... existing code ...
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
// ... rest of the code ...
|
d0c8968766a06e8c426e75edddb9c6ce88d080a0
|
fsspec/implementations/tests/test_common.py
|
fsspec/implementations/tests/test_common.py
|
import datetime
import pytest
from fsspec import AbstractFileSystem
from fsspec.implementations.tests.conftest import READ_ONLY_FILESYSTEMS
TEST_FILE = 'file'
@pytest.mark.parametrize("fs", ['local'], indirect=["fs"])
def test_created(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
assert isinstance(created, datetime.datetime)
finally:
if not isinstance(fs, tuple(READ_ONLY_FILESYSTEMS)):
fs.rm(TEST_FILE)
@pytest.mark.parametrize("fs", ["local"], indirect=["fs"])
def test_modified(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
fs.touch(TEST_FILE)
modified = fs.modified(path=TEST_FILE)
assert modified > created
assert isinstance(created, datetime.datetime)
finally:
fs.rm(TEST_FILE)
|
import datetime
import pytest
from fsspec import AbstractFileSystem
from fsspec.implementations.tests.conftest import READ_ONLY_FILESYSTEMS
TEST_FILE = 'file'
@pytest.mark.parametrize("fs", ['local'], indirect=["fs"])
def test_created(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
assert isinstance(created, datetime.datetime)
finally:
if not isinstance(fs, tuple(READ_ONLY_FILESYSTEMS)):
fs.rm(TEST_FILE)
@pytest.mark.parametrize("fs", ["local"], indirect=["fs"])
def test_modified(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
fs.touch(TEST_FILE)
modified = fs.modified(path=TEST_FILE)
assert isinstance(modified, datetime.datetime)
assert modified > created
finally:
fs.rm(TEST_FILE)
|
Fix typo in test assertion
|
Fix typo in test assertion
|
Python
|
bsd-3-clause
|
fsspec/filesystem_spec,intake/filesystem_spec,fsspec/filesystem_spec
|
import datetime
import pytest
from fsspec import AbstractFileSystem
from fsspec.implementations.tests.conftest import READ_ONLY_FILESYSTEMS
TEST_FILE = 'file'
@pytest.mark.parametrize("fs", ['local'], indirect=["fs"])
def test_created(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
assert isinstance(created, datetime.datetime)
finally:
if not isinstance(fs, tuple(READ_ONLY_FILESYSTEMS)):
fs.rm(TEST_FILE)
@pytest.mark.parametrize("fs", ["local"], indirect=["fs"])
def test_modified(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
fs.touch(TEST_FILE)
modified = fs.modified(path=TEST_FILE)
+ assert isinstance(modified, datetime.datetime)
assert modified > created
- assert isinstance(created, datetime.datetime)
finally:
fs.rm(TEST_FILE)
|
Fix typo in test assertion
|
## Code Before:
import datetime
import pytest
from fsspec import AbstractFileSystem
from fsspec.implementations.tests.conftest import READ_ONLY_FILESYSTEMS
TEST_FILE = 'file'
@pytest.mark.parametrize("fs", ['local'], indirect=["fs"])
def test_created(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
assert isinstance(created, datetime.datetime)
finally:
if not isinstance(fs, tuple(READ_ONLY_FILESYSTEMS)):
fs.rm(TEST_FILE)
@pytest.mark.parametrize("fs", ["local"], indirect=["fs"])
def test_modified(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
fs.touch(TEST_FILE)
modified = fs.modified(path=TEST_FILE)
assert modified > created
assert isinstance(created, datetime.datetime)
finally:
fs.rm(TEST_FILE)
## Instruction:
Fix typo in test assertion
## Code After:
import datetime
import pytest
from fsspec import AbstractFileSystem
from fsspec.implementations.tests.conftest import READ_ONLY_FILESYSTEMS
TEST_FILE = 'file'
@pytest.mark.parametrize("fs", ['local'], indirect=["fs"])
def test_created(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
assert isinstance(created, datetime.datetime)
finally:
if not isinstance(fs, tuple(READ_ONLY_FILESYSTEMS)):
fs.rm(TEST_FILE)
@pytest.mark.parametrize("fs", ["local"], indirect=["fs"])
def test_modified(fs: AbstractFileSystem):
try:
fs.touch(TEST_FILE)
created = fs.created(path=TEST_FILE)
fs.touch(TEST_FILE)
modified = fs.modified(path=TEST_FILE)
assert isinstance(modified, datetime.datetime)
assert modified > created
finally:
fs.rm(TEST_FILE)
|
// ... existing code ...
modified = fs.modified(path=TEST_FILE)
assert isinstance(modified, datetime.datetime)
assert modified > created
finally:
// ... rest of the code ...
|
81b1cf6973dde3ca23bbe5ac071d3decad81079a
|
pydsa/sleep_sort.py
|
pydsa/sleep_sort.py
|
from time import sleep
from threading import Timer
# Sleep Sort ;)
# Complexity: O(max(input)+n)
def sleep_sort(a):
"""
Sorts the list 'a' using Sleep sort algorithm
>>> from pydsa import sleep_sort
>>> a = [3, 4, 2]
>>> sleep_sort(a)
[2, 3, 4]
"""
sleep_sort.result = []
def add1(x):
sleep_sort.result.append(x)
mx = a[0]
for v in a:
if mx < v: mx = v
Timer(v, add1, [v]).start()
sleep(mx + 1)
return sleep_sort.result
|
from time import sleep
from threading import Timer
# Sleep Sort ;)
# Complexity: O(max(input)+n)
def sleep_sort(a):
"""
Sorts the list 'a' using Sleep sort algorithm
>>> from pydsa import sleep_sort
>>> a = [3, 4, 2]
>>> sleep_sort(a)
[2, 3, 4]
"""
sleep_sort.result = []
def add1(x):
sleep_sort.result.append(x)
mx = a[0]
for v in a:
if mx < v:
mx = v
Timer(v, add1, [v]).start()
sleep(mx + 1)
return sleep_sort.result
|
Format code according to PEP8
|
Format code according to PEP8
|
Python
|
bsd-3-clause
|
rehassachdeva/pydsa,aktech/pydsa
|
from time import sleep
from threading import Timer
# Sleep Sort ;)
# Complexity: O(max(input)+n)
+
def sleep_sort(a):
"""
Sorts the list 'a' using Sleep sort algorithm
>>> from pydsa import sleep_sort
>>> a = [3, 4, 2]
>>> sleep_sort(a)
[2, 3, 4]
"""
sleep_sort.result = []
+
def add1(x):
sleep_sort.result.append(x)
+
mx = a[0]
for v in a:
- if mx < v: mx = v
+ if mx < v:
+ mx = v
Timer(v, add1, [v]).start()
sleep(mx + 1)
return sleep_sort.result
|
Format code according to PEP8
|
## Code Before:
from time import sleep
from threading import Timer
# Sleep Sort ;)
# Complexity: O(max(input)+n)
def sleep_sort(a):
"""
Sorts the list 'a' using Sleep sort algorithm
>>> from pydsa import sleep_sort
>>> a = [3, 4, 2]
>>> sleep_sort(a)
[2, 3, 4]
"""
sleep_sort.result = []
def add1(x):
sleep_sort.result.append(x)
mx = a[0]
for v in a:
if mx < v: mx = v
Timer(v, add1, [v]).start()
sleep(mx + 1)
return sleep_sort.result
## Instruction:
Format code according to PEP8
## Code After:
from time import sleep
from threading import Timer
# Sleep Sort ;)
# Complexity: O(max(input)+n)
def sleep_sort(a):
"""
Sorts the list 'a' using Sleep sort algorithm
>>> from pydsa import sleep_sort
>>> a = [3, 4, 2]
>>> sleep_sort(a)
[2, 3, 4]
"""
sleep_sort.result = []
def add1(x):
sleep_sort.result.append(x)
mx = a[0]
for v in a:
if mx < v:
mx = v
Timer(v, add1, [v]).start()
sleep(mx + 1)
return sleep_sort.result
|
# ... existing code ...
# Complexity: O(max(input)+n)
# ... modified code ...
sleep_sort.result = []
def add1(x):
...
sleep_sort.result.append(x)
mx = a[0]
...
for v in a:
if mx < v:
mx = v
Timer(v, add1, [v]).start()
# ... rest of the code ...
|
ff61fb41273b8bf94bd0c64ddb1a4c1e1c91bb5f
|
api/__init__.py
|
api/__init__.py
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
application.add_url_rule('/auth/register',
'register',
register)
application.add_url_rule('/auth/login',
'login',
login)
db.init_app(application)
return application
from api.api import *
|
Add url rules for public unauthorised routes
|
Add url rules for public unauthorised routes
|
Python
|
mit
|
EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
+
+ application.add_url_rule('/auth/register',
+ 'register',
+ register)
+
+ application.add_url_rule('/auth/login',
+ 'login',
+ login)
+
db.init_app(application)
return application
from api.api import *
|
Add url rules for public unauthorised routes
|
## Code Before:
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
## Instruction:
Add url rules for public unauthorised routes
## Code After:
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
application.add_url_rule('/auth/register',
'register',
register)
application.add_url_rule('/auth/login',
'login',
login)
db.init_app(application)
return application
from api.api import *
|
// ... existing code ...
application.config.from_object(config[config_name])
application.add_url_rule('/auth/register',
'register',
register)
application.add_url_rule('/auth/login',
'login',
login)
db.init_app(application)
// ... rest of the code ...
|
90e7bc2c8313de2a5054d5290441c527f5f2c253
|
gameButton.py
|
gameButton.py
|
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
def renderButton(self, surface, isSelected, origin_x, origin_y):
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
def runGame(self):
self.importedGameFunction()
|
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
WHITE = [255, 255, 255]
BLACK = [0, 0, 0]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
label = self.font.render(self.label, True, self.BLACK)
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
self.importedGameFunction()
|
Add labels to menu buttons
|
Add labels to menu buttons
|
Python
|
mit
|
MEhlinger/rpi_pushbutton_games
|
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
+ WHITE = [255, 255, 255]
+ BLACK = [0, 0, 0]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
+ self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
+ label = self.font.render(self.label, True, self.BLACK)
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
+ surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
self.importedGameFunction()
|
Add labels to menu buttons
|
## Code Before:
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
def renderButton(self, surface, isSelected, origin_x, origin_y):
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
def runGame(self):
self.importedGameFunction()
## Instruction:
Add labels to menu buttons
## Code After:
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
WHITE = [255, 255, 255]
BLACK = [0, 0, 0]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
label = self.font.render(self.label, True, self.BLACK)
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
self.importedGameFunction()
|
// ... existing code ...
PINK = [255, 55, 135]
WHITE = [255, 255, 255]
BLACK = [0, 0, 0]
// ... modified code ...
self.importedGameFunction = importedGameFunction
self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
label = self.font.render(self.label, True, self.BLACK)
if isSelected:
...
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
// ... rest of the code ...
|
3bad6c23fad5525628db1c3c1b99f3f86c08db63
|
cloudmeta/apps/metadata/models.py
|
cloudmeta/apps/metadata/models.py
|
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
Allow ecdsa 384 bit keys too
|
Allow ecdsa 384 bit keys too
|
Python
|
agpl-3.0
|
bencord0/cloudmeta,bencord0/cloudmeta
|
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
+ ('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
Allow ecdsa 384 bit keys too
|
## Code Before:
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
## Instruction:
Allow ecdsa 384 bit keys too
## Code After:
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
# ... existing code ...
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
# ... rest of the code ...
|
ef94948a8ce16d9d80fb69950381e0936a462bb0
|
tests/config_tests.py
|
tests/config_tests.py
|
from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.api_key, 'API Key')
assert_equal(config.location, 'Zipcode')
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
|
from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
|
Update test to get api_key from environment
|
Update test to get api_key from environment
|
Python
|
mit
|
paris3200/Weather,paris3200/wunderapi
|
from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
- assert_equal(config.api_key, 'API Key')
- assert_equal(config.location, 'Zipcode')
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
|
Update test to get api_key from environment
|
## Code Before:
from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.api_key, 'API Key')
assert_equal(config.location, 'Zipcode')
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
## Instruction:
Update test to get api_key from environment
## Code After:
from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
|
...
config.parse_config()
assert_equal(config.date_format, 'date')
...
|
6d0b2b5787be4d3a23fa74eccebb4935cb85d48b
|
salt/runners/state.py
|
salt/runners/state.py
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
Fix traceback because outputter expects data in {'host', data.. } format
|
Fix traceback because outputter expects data in {'host', data.. } format
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
- {key: val},
+ {'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
Fix traceback because outputter expects data in {'host', data.. } format
|
## Code Before:
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
## Instruction:
Fix traceback because outputter expects data in {'host', data.. } format
## Code After:
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
# ... existing code ...
salt.output.display_output(
{'local': {key: val}},
'highstate',
# ... rest of the code ...
|
ee3634fbee7e0bd311337007743b30934aca73ba
|
pyfibot/modules/module_thetvdb.py
|
pyfibot/modules/module_thetvdb.py
|
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
|
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2
|
Python
|
bsd-3-clause
|
rnyberg/pyfibot,rnyberg/pyfibot,aapa/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,aapa/pyfibot,EArmour/pyfibot,huqa/pyfibot
|
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
- latest_season = series[series.keys()[-1]]
+ episodes = []
+ # find all episodes with airdate > now
+ for season_no, season in series.items():
- for episode_no, episode in latest_season.items():
+ for episode_no, episode in season.items():
- firstaired = episode['firstaired']
+ firstaired = episode['firstaired']
+ if not firstaired:
+ continue
+ airdate = datetime.strptime(firstaired, "%Y-%m-%d")
+ td = airdate - now
+ # find the next unaired episode
+ if td > timedelta(0, 0, 0):
+ episodes.append(episode)
- if not firstaired:
- break
+ # if any episodes were found, find out the one with airdate closest to now
+ if episodes:
+ # sort the list just in case
+ episodes = sorted(episodes, key=firstaired)
+ episode = episodes[0]
+ td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
- airdate = datetime.strptime(firstaired, "%Y-%m-%d")
- td = airdate - now
- # find the next unaired episode
- if td > timedelta(0, 0, 0):
- msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
+ msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
- bot.say(channel, msg.encode("UTF-8"))
+ bot.say(channel, msg.encode("UTF-8"))
+ else:
- return
-
- msg = "No new episode airdates found for %s" % series.data['seriesname']
+ msg = "No new episode airdates found for %s" % series.data['seriesname']
- bot.say(channel, msg.encode("UTF-8"))
+ bot.say(channel, msg.encode("UTF-8"))
|
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
|
## Code Before:
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
## Instruction:
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
## Code After:
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
// ... existing code ...
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
// ... rest of the code ...
|
0ddaed24e0f011ca1bb777af49936f64684a7d4c
|
bin/scripts/contig_length_filter.py
|
bin/scripts/contig_length_filter.py
|
import sys
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
if len(sys.argv) < 5:
print("Usage: %s <length threshold> <contigs_file> <suffix> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
suffix = sys.argv[3]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
output_handle = open(sys.argv[4], "w")
SeqIO.write((SeqRecord(record.seq, (record.name + "_" + suffix).replace(".", "_"), "","") for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1])), output_handle, "fasta")
output_handle.close()
|
import sys
from Bio import SeqIO
if len(sys.argv) < 4:
print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
filtered_iterator = (record for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1]))
output_handle = open(sys.argv[3], "w")
SeqIO.write(filtered_iterator, output_handle, "fasta")
output_handle.close()
|
Revert "length filter script now adds provided suffix to contig names"
|
Revert "length filter script now adds provided suffix to contig names"
This reverts commit 4d3985f667465eb5564de4fada8820e23607a58b.
|
Python
|
mit
|
tanaes/snakemake_assemble,tanaes/snakemake_assemble,tanaes/snakemake_assemble
|
import sys
from Bio import SeqIO
- from Bio.SeqRecord import SeqRecord
- if len(sys.argv) < 5:
+ if len(sys.argv) < 4:
- print("Usage: %s <length threshold> <contigs_file> <suffix> <output>" % sys.argv[0])
+ print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
- suffix = sys.argv[3]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
-
+ filtered_iterator = (record for record in input_seq_iterator \
+ if len(record.seq) > int(sys.argv[1]))
+
- output_handle = open(sys.argv[4], "w")
+ output_handle = open(sys.argv[3], "w")
+ SeqIO.write(filtered_iterator, output_handle, "fasta")
- SeqIO.write((SeqRecord(record.seq, (record.name + "_" + suffix).replace(".", "_"), "","") for record in input_seq_iterator \
- if len(record.seq) > int(sys.argv[1])), output_handle, "fasta")
output_handle.close()
|
Revert "length filter script now adds provided suffix to contig names"
|
## Code Before:
import sys
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
if len(sys.argv) < 5:
print("Usage: %s <length threshold> <contigs_file> <suffix> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
suffix = sys.argv[3]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
output_handle = open(sys.argv[4], "w")
SeqIO.write((SeqRecord(record.seq, (record.name + "_" + suffix).replace(".", "_"), "","") for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1])), output_handle, "fasta")
output_handle.close()
## Instruction:
Revert "length filter script now adds provided suffix to contig names"
## Code After:
import sys
from Bio import SeqIO
if len(sys.argv) < 4:
print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
filtered_iterator = (record for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1]))
output_handle = open(sys.argv[3], "w")
SeqIO.write(filtered_iterator, output_handle, "fasta")
output_handle.close()
|
// ... existing code ...
from Bio import SeqIO
if len(sys.argv) < 4:
print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
// ... modified code ...
f_n = sys.argv[2]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
filtered_iterator = (record for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1]))
output_handle = open(sys.argv[3], "w")
SeqIO.write(filtered_iterator, output_handle, "fasta")
output_handle.close()
// ... rest of the code ...
|
214511a6fbdd0763667e740735d0876f78a3b244
|
derpibooru/query.py
|
derpibooru/query.py
|
from .request import url
class Search(object):
def __init__(self, key=None, q=[], sf="created_at", sd="desc"):
self._parameters = {
"key": key,
"q": q,
"sf": sf,
"sd": sd
}
@property
def parameters(self):
return self._parameters
@property
def url(self):
return url(**self.parameters)
def key(self, key=None):
self._parameters["key"] = key
return Search(**self._parameters)
def query(self, *q):
self._parameters["q"] = [str(tag).strip() for tag in q]
return Search(**self._parameters)
def descending(self):
self._parameters["sd"] = "desc"
return Search(**self._parameters)
def ascending(self):
self._parameters["sd"] = "asc"
return Search(**self._parameters)
def sort_by(self, sf):
self._parameters["sf"] = sf
return Search(**self._parameters)
|
from .request import url
class Search(object):
def __init__(self, key=None, q=[], sf="created_at", sd="desc"):
self._parameters = {
"key": key,
"q": [str(tag).strip() for tag in q if tag],
"sf": sf,
"sd": sd
}
@property
def parameters(self):
return self._parameters
@property
def url(self):
return url(**self.parameters)
def key(self, key=None):
self._parameters["key"] = key
return Search(**self._parameters)
def query(self, *q):
self._parameters["q"] = [str(tag).strip() for tag in q if tag]
return Search(**self._parameters)
def descending(self):
self._parameters["sd"] = "desc"
return Search(**self._parameters)
def ascending(self):
self._parameters["sd"] = "asc"
return Search(**self._parameters)
def sort_by(self, sf):
self._parameters["sf"] = sf
return Search(**self._parameters)
|
Add check for empty tags
|
Add check for empty tags
|
Python
|
bsd-2-clause
|
joshua-stone/DerPyBooru
|
from .request import url
class Search(object):
def __init__(self, key=None, q=[], sf="created_at", sd="desc"):
self._parameters = {
"key": key,
- "q": q,
+ "q": [str(tag).strip() for tag in q if tag],
"sf": sf,
"sd": sd
}
@property
def parameters(self):
return self._parameters
@property
def url(self):
return url(**self.parameters)
def key(self, key=None):
self._parameters["key"] = key
return Search(**self._parameters)
def query(self, *q):
- self._parameters["q"] = [str(tag).strip() for tag in q]
+ self._parameters["q"] = [str(tag).strip() for tag in q if tag]
return Search(**self._parameters)
def descending(self):
self._parameters["sd"] = "desc"
return Search(**self._parameters)
def ascending(self):
self._parameters["sd"] = "asc"
return Search(**self._parameters)
def sort_by(self, sf):
self._parameters["sf"] = sf
return Search(**self._parameters)
|
Add check for empty tags
|
## Code Before:
from .request import url
class Search(object):
def __init__(self, key=None, q=[], sf="created_at", sd="desc"):
self._parameters = {
"key": key,
"q": q,
"sf": sf,
"sd": sd
}
@property
def parameters(self):
return self._parameters
@property
def url(self):
return url(**self.parameters)
def key(self, key=None):
self._parameters["key"] = key
return Search(**self._parameters)
def query(self, *q):
self._parameters["q"] = [str(tag).strip() for tag in q]
return Search(**self._parameters)
def descending(self):
self._parameters["sd"] = "desc"
return Search(**self._parameters)
def ascending(self):
self._parameters["sd"] = "asc"
return Search(**self._parameters)
def sort_by(self, sf):
self._parameters["sf"] = sf
return Search(**self._parameters)
## Instruction:
Add check for empty tags
## Code After:
from .request import url
class Search(object):
def __init__(self, key=None, q=[], sf="created_at", sd="desc"):
self._parameters = {
"key": key,
"q": [str(tag).strip() for tag in q if tag],
"sf": sf,
"sd": sd
}
@property
def parameters(self):
return self._parameters
@property
def url(self):
return url(**self.parameters)
def key(self, key=None):
self._parameters["key"] = key
return Search(**self._parameters)
def query(self, *q):
self._parameters["q"] = [str(tag).strip() for tag in q if tag]
return Search(**self._parameters)
def descending(self):
self._parameters["sd"] = "desc"
return Search(**self._parameters)
def ascending(self):
self._parameters["sd"] = "asc"
return Search(**self._parameters)
def sort_by(self, sf):
self._parameters["sf"] = sf
return Search(**self._parameters)
|
# ... existing code ...
"key": key,
"q": [str(tag).strip() for tag in q if tag],
"sf": sf,
# ... modified code ...
def query(self, *q):
self._parameters["q"] = [str(tag).strip() for tag in q if tag]
return Search(**self._parameters)
# ... rest of the code ...
|
a5f34a8011718ba31dc3d70d761bc4583112f133
|
common/morse_parse.py
|
common/morse_parse.py
|
f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
|
import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
|
Make morse parser not assume that the current working directory is common/
|
Make morse parser not assume that the current working directory is common/
|
Python
|
mit
|
nickodell/morse-code
|
- f = open("morse_table.txt")
+ import inspect, os
+ common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
+
+ f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
|
Make morse parser not assume that the current working directory is common/
|
## Code Before:
f = open("morse_table.txt")
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
## Instruction:
Make morse parser not assume that the current working directory is common/
## Code After:
import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
morse_table = f.read()
morse_table = dict([(morse[0:1], morse[2:len(morse)]) for morse in morse_table.split("\n")])
f.close()
|
...
import inspect, os
common_dir = os.path.dirname(inspect.getfile(inspect.currentframe())) # script directory
f = open(os.path.join(common_dir, "morse_table.txt"))
...
|
358bdb98ba4a17c75773c7b09853580f5e7dd4e7
|
tests/people_test.py
|
tests/people_test.py
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
Adjust test_person_made_works to keep consistency.
|
Adjust test_person_made_works to keep consistency.
|
Python
|
mit
|
item4/cliche,item4/cliche,clicheio/cliche,clicheio/cliche,clicheio/cliche
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
- assert len(fx_people.clamp_member_1.credits) == 1
+ assert fx_people.clamp_member_1.credits == {
+ fx_works.skura_member_asso_1
+ }
- for asso in fx_people.clamp_member_1.credits:
- assert asso.person == fx_people.clamp_member_1
- assert asso.work == fx_works.cardcaptor_sakura
- assert asso.role == 'Artist'
|
Adjust test_person_made_works to keep consistency.
|
## Code Before:
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
## Instruction:
Adjust test_person_made_works to keep consistency.
## Code After:
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
// ... existing code ...
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
// ... rest of the code ...
|
224abc99becc1683605a6dc5c3460510efef3efb
|
tests/test_pyserial.py
|
tests/test_pyserial.py
|
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
|
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
'''
# This test is commented out because it requires an actual serial port.
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
'''
if __name__ == '__main__':
unittest.main()
|
Comment out the pyserial TestIsCorrectVariant test.
|
Comment out the pyserial TestIsCorrectVariant test.
|
Python
|
agpl-3.0
|
Jnesselr/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g,makerbot/s3g,makerbot/s3g
|
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
-
+ '''
+ # This test is commented out because it requires an actual serial port.
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
+ '''
if __name__ == '__main__':
unittest.main()
|
Comment out the pyserial TestIsCorrectVariant test.
|
## Code Before:
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
## Instruction:
Comment out the pyserial TestIsCorrectVariant test.
## Code After:
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
'''
# This test is commented out because it requires an actual serial port.
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
'''
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
'''
# This test is commented out because it requires an actual serial port.
def test_variantDoesBlocking(self):
// ... modified code ...
s = serial.Serial(portname)
'''
// ... rest of the code ...
|
a80f5bad5369ae9a7ae3ab6914d3e9e642062ec3
|
odl/contrib/param_opt/test/test_param_opt.py
|
odl/contrib/param_opt/test/test_param_opt.py
|
import pytest
import odl
import odl.contrib.fom
import odl.contrib.param_opt
from odl.util.testutils import simple_fixture
space = simple_fixture('space',
[odl.rn(3),
odl.uniform_discr([0, 0], [1, 1], [9, 11]),
odl.uniform_discr(0, 1, 10)])
def test_optimal_parameters(space):
"""Tests if optimal_parameters works for some simple examples."""
fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
phantoms = [mynoise]
data = [mynoise]
def reconstruction(data, lam):
"""Perturbs the data by adding lam to it."""
return data + lam
result = odl.contrib.param_opt.optimal_parameters(reconstruction, fom,
phantoms, data, 1)
assert result == pytest.approx(0)
if __name__ == '__main__':
odl.util.test_file(__file__)
|
import pytest
import odl
import odl.contrib.fom
import odl.contrib.param_opt
from odl.util.testutils import simple_fixture
space = simple_fixture('space',
[odl.rn(3),
odl.uniform_discr([0, 0], [1, 1], [9, 11]),
odl.uniform_discr(0, 1, 10)])
fom = simple_fixture('fom',
[odl.contrib.fom.mean_squared_error,
odl.contrib.fom.mean_absolute_error])
def test_optimal_parameters_one_parameter(space, fom):
"""Tests if optimal_parameters works for some simple examples."""
# fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
phantoms = [mynoise]
data = [mynoise]
def reconstruction(data, lam):
"""Perturbs the data by adding lam to it."""
return data + lam
result = odl.contrib.param_opt.optimal_parameters(reconstruction, fom,
phantoms, data, 1)
assert result == pytest.approx(0, abs=1e-4)
if __name__ == '__main__':
odl.util.test_file(__file__)
|
Add fixture for FOM for test_optimal_parameters
|
TST: Add fixture for FOM for test_optimal_parameters
|
Python
|
mpl-2.0
|
odlgroup/odl,odlgroup/odl,kohr-h/odl,kohr-h/odl
|
import pytest
import odl
import odl.contrib.fom
import odl.contrib.param_opt
from odl.util.testutils import simple_fixture
space = simple_fixture('space',
[odl.rn(3),
odl.uniform_discr([0, 0], [1, 1], [9, 11]),
odl.uniform_discr(0, 1, 10)])
+ fom = simple_fixture('fom',
+ [odl.contrib.fom.mean_squared_error,
+ odl.contrib.fom.mean_absolute_error])
+
- def test_optimal_parameters(space):
+ def test_optimal_parameters_one_parameter(space, fom):
"""Tests if optimal_parameters works for some simple examples."""
- fom = odl.contrib.fom.mean_squared_error
+ # fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
phantoms = [mynoise]
data = [mynoise]
def reconstruction(data, lam):
"""Perturbs the data by adding lam to it."""
return data + lam
result = odl.contrib.param_opt.optimal_parameters(reconstruction, fom,
phantoms, data, 1)
- assert result == pytest.approx(0)
+ assert result == pytest.approx(0, abs=1e-4)
if __name__ == '__main__':
odl.util.test_file(__file__)
|
Add fixture for FOM for test_optimal_parameters
|
## Code Before:
import pytest
import odl
import odl.contrib.fom
import odl.contrib.param_opt
from odl.util.testutils import simple_fixture
space = simple_fixture('space',
[odl.rn(3),
odl.uniform_discr([0, 0], [1, 1], [9, 11]),
odl.uniform_discr(0, 1, 10)])
def test_optimal_parameters(space):
"""Tests if optimal_parameters works for some simple examples."""
fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
phantoms = [mynoise]
data = [mynoise]
def reconstruction(data, lam):
"""Perturbs the data by adding lam to it."""
return data + lam
result = odl.contrib.param_opt.optimal_parameters(reconstruction, fom,
phantoms, data, 1)
assert result == pytest.approx(0)
if __name__ == '__main__':
odl.util.test_file(__file__)
## Instruction:
Add fixture for FOM for test_optimal_parameters
## Code After:
import pytest
import odl
import odl.contrib.fom
import odl.contrib.param_opt
from odl.util.testutils import simple_fixture
space = simple_fixture('space',
[odl.rn(3),
odl.uniform_discr([0, 0], [1, 1], [9, 11]),
odl.uniform_discr(0, 1, 10)])
fom = simple_fixture('fom',
[odl.contrib.fom.mean_squared_error,
odl.contrib.fom.mean_absolute_error])
def test_optimal_parameters_one_parameter(space, fom):
"""Tests if optimal_parameters works for some simple examples."""
# fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
phantoms = [mynoise]
data = [mynoise]
def reconstruction(data, lam):
"""Perturbs the data by adding lam to it."""
return data + lam
result = odl.contrib.param_opt.optimal_parameters(reconstruction, fom,
phantoms, data, 1)
assert result == pytest.approx(0, abs=1e-4)
if __name__ == '__main__':
odl.util.test_file(__file__)
|
// ... existing code ...
fom = simple_fixture('fom',
[odl.contrib.fom.mean_squared_error,
odl.contrib.fom.mean_absolute_error])
def test_optimal_parameters_one_parameter(space, fom):
"""Tests if optimal_parameters works for some simple examples."""
# fom = odl.contrib.fom.mean_squared_error
mynoise = odl.phantom.white_noise(space)
// ... modified code ...
phantoms, data, 1)
assert result == pytest.approx(0, abs=1e-4)
// ... rest of the code ...
|
348c28bacececb787ab73c9716dc515d0fabbe4b
|
armstrong/hatband/widgets/visualsearch.py
|
armstrong/hatband/widgets/visualsearch.py
|
from django.forms import Widget
from django.template.loader import render_to_string
from ..utils import static_url
class GenericKeyWidget(Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"),
static_url("hatband/css/generickey.css"),
)
}
def __init__(self, object_id_name="object_id",
content_type_name="content_type", *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
self.object_id_name = object_id_name
self.content_type_name = content_type_name
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
final_attrs["value"] = value
final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1
final_attrs["object_id_name"] = self.object_id_name
final_attrs["content_type_name"] = self.content_type_name
return render_to_string(self.template, final_attrs)
|
from django.forms import Widget
from django.template.loader import render_to_string
from ..utils import static_url
class GenericKeyWidget(Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"),
static_url("hatband/css/generickey.css"),
)
}
def __init__(self, object_id_name="object_id",
content_type_name="content_type", *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
self.object_id_name = object_id_name
self.content_type_name = content_type_name
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
final_attrs.update({
"value": value,
"is_templated": final_attrs["id"].find("__prefix__") > -1,
"object_id_name": self.object_id_name,
"content_type_name": self.content_type_name,
})
return render_to_string(self.template, final_attrs)
|
Clean up this code a bit (no functional change)
|
Clean up this code a bit (no functional change)
|
Python
|
apache-2.0
|
armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband
|
from django.forms import Widget
from django.template.loader import render_to_string
from ..utils import static_url
class GenericKeyWidget(Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"),
static_url("hatband/css/generickey.css"),
)
}
def __init__(self, object_id_name="object_id",
content_type_name="content_type", *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
self.object_id_name = object_id_name
self.content_type_name = content_type_name
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
- final_attrs["value"] = value
+ final_attrs.update({
+ "value": value,
- final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1
+ "is_templated": final_attrs["id"].find("__prefix__") > -1,
- final_attrs["object_id_name"] = self.object_id_name
+ "object_id_name": self.object_id_name,
- final_attrs["content_type_name"] = self.content_type_name
+ "content_type_name": self.content_type_name,
+ })
return render_to_string(self.template, final_attrs)
|
Clean up this code a bit (no functional change)
|
## Code Before:
from django.forms import Widget
from django.template.loader import render_to_string
from ..utils import static_url
class GenericKeyWidget(Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"),
static_url("hatband/css/generickey.css"),
)
}
def __init__(self, object_id_name="object_id",
content_type_name="content_type", *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
self.object_id_name = object_id_name
self.content_type_name = content_type_name
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
final_attrs["value"] = value
final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1
final_attrs["object_id_name"] = self.object_id_name
final_attrs["content_type_name"] = self.content_type_name
return render_to_string(self.template, final_attrs)
## Instruction:
Clean up this code a bit (no functional change)
## Code After:
from django.forms import Widget
from django.template.loader import render_to_string
from ..utils import static_url
class GenericKeyWidget(Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"),
static_url("hatband/css/generickey.css"),
)
}
def __init__(self, object_id_name="object_id",
content_type_name="content_type", *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
self.object_id_name = object_id_name
self.content_type_name = content_type_name
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
final_attrs.update({
"value": value,
"is_templated": final_attrs["id"].find("__prefix__") > -1,
"object_id_name": self.object_id_name,
"content_type_name": self.content_type_name,
})
return render_to_string(self.template, final_attrs)
|
...
final_attrs = self.build_attrs(attrs, name=name)
final_attrs.update({
"value": value,
"is_templated": final_attrs["id"].find("__prefix__") > -1,
"object_id_name": self.object_id_name,
"content_type_name": self.content_type_name,
})
return render_to_string(self.template, final_attrs)
...
|
f1da9bc9aae253779121f2b844e684c4ea4dd15f
|
seeker/migrations/0001_initial.py
|
seeker/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SavedSearch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('url', models.CharField(max_length=200, db_index=True)),
('querystring', models.TextField(blank=True)),
('default', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': (b'name',),
'verbose_name_plural': b'saved searches',
},
bases=(models.Model,),
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SavedSearch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('url', models.CharField(max_length=200, db_index=True)),
('querystring', models.TextField(blank=True)),
('default', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': (b'name',),
'verbose_name_plural': b'saved searches',
},
bases=(models.Model,),
),
]
|
Add related_name to initial migration so it doesn't try to later
|
Add related_name to initial migration so it doesn't try to later
|
Python
|
bsd-2-clause
|
imsweb/django-seeker,imsweb/django-seeker
|
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SavedSearch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('url', models.CharField(max_length=200, db_index=True)),
('querystring', models.TextField(blank=True)),
('default', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
- ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
+ ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': (b'name',),
'verbose_name_plural': b'saved searches',
},
bases=(models.Model,),
),
]
|
Add related_name to initial migration so it doesn't try to later
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SavedSearch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('url', models.CharField(max_length=200, db_index=True)),
('querystring', models.TextField(blank=True)),
('default', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': (b'name',),
'verbose_name_plural': b'saved searches',
},
bases=(models.Model,),
),
]
## Instruction:
Add related_name to initial migration so it doesn't try to later
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SavedSearch',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('url', models.CharField(max_length=200, db_index=True)),
('querystring', models.TextField(blank=True)),
('default', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': (b'name',),
'verbose_name_plural': b'saved searches',
},
bases=(models.Model,),
),
]
|
...
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
],
...
|
c0787c468e1b71d7e9db93b5f5990ae9bb506d82
|
pystruct/datasets/dataset_loaders.py
|
pystruct/datasets/dataset_loaders.py
|
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'))
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'))
return cPickle.load(data_file)
|
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'),'rb')
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'),'rb')
return cPickle.load(data_file)
|
FIX other two sample data load for Windows
|
FIX other two sample data load for Windows
|
Python
|
bsd-2-clause
|
massmutual/pystruct,pystruct/pystruct,amueller/pystruct,d-mittal/pystruct,wattlebird/pystruct,pystruct/pystruct,d-mittal/pystruct,wattlebird/pystruct,massmutual/pystruct,amueller/pystruct
|
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
- data_file = open(join(module_path, 'scene.pickle'))
+ data_file = open(join(module_path, 'scene.pickle'),'rb')
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
- data_file = open(join(module_path, 'snakes.pickle'))
+ data_file = open(join(module_path, 'snakes.pickle'),'rb')
return cPickle.load(data_file)
|
FIX other two sample data load for Windows
|
## Code Before:
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'))
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'))
return cPickle.load(data_file)
## Instruction:
FIX other two sample data load for Windows
## Code After:
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'),'rb')
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'),'rb')
return cPickle.load(data_file)
|
// ... existing code ...
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'),'rb')
return cPickle.load(data_file)
// ... modified code ...
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'),'rb')
return cPickle.load(data_file)
// ... rest of the code ...
|
06bfabc328c4aa32120fd7e52302db76974c2d1b
|
greengraph/command.py
|
greengraph/command.py
|
from argparse import ArgumentParser
from matplotlib import pyplot as plt
from graph import Greengraph
def process():
parser = ArgumentParser(
description="Produce graph quantifying the amount of green land between two locations")
parser.add_argument("--start", nargs="+",
help="The starting location, defaults to London ")
parser.add_argument("--end", nargs="+",
help="The ending location, defaults to Durham")
parser.add_argument("--steps", type=int,
help="An integer number of steps between the starting and ending locations, defaults to 10")
parser.add_argument("--out",
help="The output filename, defaults to graph.png")
arguments = parser.parse_args()
if arguments.start & & arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
else:
mygraph = Greengraph("London", "Durham")
if arguments.steps:
data = mygraph.green_between(arguments.steps)
else:
data = mygraph.green_between(10)
plt.plot(data)
plt.xlabel("Step")
plt.ylabel("Greenness")
if arguments.start & & arguments.end:
plt.title("Graph of green land between " +
" ".join(arguments.start) + " and " + " ".join(arguments.end))
else:
plt.title("Graph of green land between London and Durham")
if arguments.out:
plt.savefig(arguments.out)
else:
plt.savefig("graph.png")
if __name__ == "__main__":
process()
|
from argparse import ArgumentParser
from matplotlib import pyplot as plt
from graph import Greengraph
def process():
parser = ArgumentParser(
description="Produce graph quantifying the amount of green land between two locations")
parser.add_argument("--start", nargs="+",
help="The starting location, defaults to London ")
parser.add_argument("--end", nargs="+",
help="The ending location, defaults to Durham")
parser.add_argument("--steps", type=int,
help="An integer number of steps between the starting and ending locations, defaults to 10")
parser.add_argument("--out",
help="The output filename, defaults to graph.png")
arguments = parser.parse_args()
if arguments.start and arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
else:
mygraph = Greengraph("London", "Durham")
if arguments.steps:
data = mygraph.green_between(arguments.steps)
else:
data = mygraph.green_between(10)
plt.plot(data)
plt.xlabel("Step")
plt.ylabel("Number of green pixels (Max 160000)")
if arguments.start and arguments.end:
plt.title("Graph of green land between " +
" ".join(arguments.start) + " and " + " ".join(arguments.end))
else:
plt.title("Graph of green land between London and Durham")
if arguments.out:
plt.savefig(arguments.out)
else:
plt.savefig("graph.png")
if __name__ == "__main__":
process()
|
Correct error where && was used instead of and
|
Correct error where && was used instead of and
|
Python
|
mit
|
MikeVasmer/GreenGraphCoursework
|
from argparse import ArgumentParser
from matplotlib import pyplot as plt
from graph import Greengraph
def process():
parser = ArgumentParser(
description="Produce graph quantifying the amount of green land between two locations")
parser.add_argument("--start", nargs="+",
help="The starting location, defaults to London ")
parser.add_argument("--end", nargs="+",
help="The ending location, defaults to Durham")
parser.add_argument("--steps", type=int,
help="An integer number of steps between the starting and ending locations, defaults to 10")
parser.add_argument("--out",
help="The output filename, defaults to graph.png")
arguments = parser.parse_args()
- if arguments.start & & arguments.end:
+ if arguments.start and arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
else:
mygraph = Greengraph("London", "Durham")
if arguments.steps:
data = mygraph.green_between(arguments.steps)
else:
data = mygraph.green_between(10)
plt.plot(data)
plt.xlabel("Step")
- plt.ylabel("Greenness")
+ plt.ylabel("Number of green pixels (Max 160000)")
- if arguments.start & & arguments.end:
+ if arguments.start and arguments.end:
plt.title("Graph of green land between " +
" ".join(arguments.start) + " and " + " ".join(arguments.end))
else:
plt.title("Graph of green land between London and Durham")
if arguments.out:
plt.savefig(arguments.out)
else:
plt.savefig("graph.png")
if __name__ == "__main__":
process()
|
Correct error where && was used instead of and
|
## Code Before:
from argparse import ArgumentParser
from matplotlib import pyplot as plt
from graph import Greengraph
def process():
parser = ArgumentParser(
description="Produce graph quantifying the amount of green land between two locations")
parser.add_argument("--start", nargs="+",
help="The starting location, defaults to London ")
parser.add_argument("--end", nargs="+",
help="The ending location, defaults to Durham")
parser.add_argument("--steps", type=int,
help="An integer number of steps between the starting and ending locations, defaults to 10")
parser.add_argument("--out",
help="The output filename, defaults to graph.png")
arguments = parser.parse_args()
if arguments.start & & arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
else:
mygraph = Greengraph("London", "Durham")
if arguments.steps:
data = mygraph.green_between(arguments.steps)
else:
data = mygraph.green_between(10)
plt.plot(data)
plt.xlabel("Step")
plt.ylabel("Greenness")
if arguments.start & & arguments.end:
plt.title("Graph of green land between " +
" ".join(arguments.start) + " and " + " ".join(arguments.end))
else:
plt.title("Graph of green land between London and Durham")
if arguments.out:
plt.savefig(arguments.out)
else:
plt.savefig("graph.png")
if __name__ == "__main__":
process()
## Instruction:
Correct error where && was used instead of and
## Code After:
from argparse import ArgumentParser
from matplotlib import pyplot as plt
from graph import Greengraph
def process():
parser = ArgumentParser(
description="Produce graph quantifying the amount of green land between two locations")
parser.add_argument("--start", nargs="+",
help="The starting location, defaults to London ")
parser.add_argument("--end", nargs="+",
help="The ending location, defaults to Durham")
parser.add_argument("--steps", type=int,
help="An integer number of steps between the starting and ending locations, defaults to 10")
parser.add_argument("--out",
help="The output filename, defaults to graph.png")
arguments = parser.parse_args()
if arguments.start and arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
else:
mygraph = Greengraph("London", "Durham")
if arguments.steps:
data = mygraph.green_between(arguments.steps)
else:
data = mygraph.green_between(10)
plt.plot(data)
plt.xlabel("Step")
plt.ylabel("Number of green pixels (Max 160000)")
if arguments.start and arguments.end:
plt.title("Graph of green land between " +
" ".join(arguments.start) + " and " + " ".join(arguments.end))
else:
plt.title("Graph of green land between London and Durham")
if arguments.out:
plt.savefig(arguments.out)
else:
plt.savefig("graph.png")
if __name__ == "__main__":
process()
|
# ... existing code ...
if arguments.start and arguments.end:
mygraph = Greengraph(arguments.start, arguments.end)
# ... modified code ...
plt.xlabel("Step")
plt.ylabel("Number of green pixels (Max 160000)")
if arguments.start and arguments.end:
plt.title("Graph of green land between " +
# ... rest of the code ...
|
6672a0634265e09366a9274d3c2a04afca49cf02
|
dirtree_filter.py
|
dirtree_filter.py
|
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"]
self.hidden_dirs = ["CVS", "__pycache__"]
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
elif info.is_dir:
if not self.show_dirs:
return False
if info.filename in self.hidden_dirs:
return False
for ext in self.hidden_exts:
if info.filename.endswith(ext):
return False
if info.filename.startswith(".#"):
return False
return True
|
import re
def compile_file_patterns(patterns):
return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
hidden_dirs = ["CVS", "__pycache__"]
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.r_hidden_file = compile_file_patterns(hidden_files)
self.r_hidden_dir = compile_file_patterns(hidden_dirs)
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
if info.is_dir:
if not self.show_dirs:
return False
if self.r_hidden_dir.match(info.filename):
return False
else:
if self.r_hidden_file.match(info.filename):
return False
return True
|
Use file patterns compiled to regular expressions to match hidden files.
|
Use file patterns compiled to regular expressions to match hidden files.
|
Python
|
mit
|
shaurz/devo
|
+ import re
+
+ def compile_file_patterns(patterns):
+ return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
+
+ hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
+ hidden_dirs = ["CVS", "__pycache__"]
+
class DirTreeFilter(object):
- def __init__(self, show_hidden=False, show_files=True, show_dirs=True):
+ def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
+ hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
- self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"]
- self.hidden_dirs = ["CVS", "__pycache__"]
+ self.r_hidden_file = compile_file_patterns(hidden_files)
+ self.r_hidden_dir = compile_file_patterns(hidden_dirs)
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
- elif info.is_dir:
+ if info.is_dir:
if not self.show_dirs:
return False
- if info.filename in self.hidden_dirs:
+ if self.r_hidden_dir.match(info.filename):
return False
- for ext in self.hidden_exts:
- if info.filename.endswith(ext):
+ else:
+ if self.r_hidden_file.match(info.filename):
return False
- if info.filename.startswith(".#"):
- return False
return True
|
Use file patterns compiled to regular expressions to match hidden files.
|
## Code Before:
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"]
self.hidden_dirs = ["CVS", "__pycache__"]
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
elif info.is_dir:
if not self.show_dirs:
return False
if info.filename in self.hidden_dirs:
return False
for ext in self.hidden_exts:
if info.filename.endswith(ext):
return False
if info.filename.startswith(".#"):
return False
return True
## Instruction:
Use file patterns compiled to regular expressions to match hidden files.
## Code After:
import re
def compile_file_patterns(patterns):
return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
hidden_dirs = ["CVS", "__pycache__"]
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.r_hidden_file = compile_file_patterns(hidden_files)
self.r_hidden_dir = compile_file_patterns(hidden_dirs)
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
if info.is_dir:
if not self.show_dirs:
return False
if self.r_hidden_dir.match(info.filename):
return False
else:
if self.r_hidden_file.match(info.filename):
return False
return True
|
# ... existing code ...
import re
def compile_file_patterns(patterns):
return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
hidden_dirs = ["CVS", "__pycache__"]
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
# ... modified code ...
self.show_dirs = show_dirs
self.r_hidden_file = compile_file_patterns(hidden_files)
self.r_hidden_dir = compile_file_patterns(hidden_dirs)
...
return False
if info.is_dir:
if not self.show_dirs:
...
return False
if self.r_hidden_dir.match(info.filename):
return False
else:
if self.r_hidden_file.match(info.filename):
return False
return True
# ... rest of the code ...
|
746c06ba70cd2854a86ea8bc45fc8e3e6192f67c
|
app.py
|
app.py
|
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
Add hashtag for currency name and symbol
|
Add hashtag for currency name and symbol
|
Python
|
mit
|
erickgnavar/coinstats
|
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
+ #{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
Add hashtag for currency name and symbol
|
## Code Before:
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
## Instruction:
Add hashtag for currency name and symbol
## Code After:
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
...
Ranking: {rank}
#{name} #{symbol}
"""
...
|
ef2f5bf541ab2938f19b11c0845610ccce5e496e
|
test/__init__.py
|
test/__init__.py
|
import unittest
class TestCase(unittest.TestCase):
pass
|
import platform
(major, minor, patch) = platform.python_version_tuple()
if int(major) == 2 and int(minor) < 7:
import unittest2 as unittest
else:
import unittest
class TestCase(unittest.TestCase):
pass
|
Make unit tests run on RHEL boxes better via the python-unittest2 library
|
Make unit tests run on RHEL boxes better via the python-unittest2 library
|
Python
|
agpl-3.0
|
pombredanne/re-core,RHInception/re-core
|
+ import platform
+ (major, minor, patch) = platform.python_version_tuple()
+ if int(major) == 2 and int(minor) < 7:
+ import unittest2 as unittest
+ else:
- import unittest
+ import unittest
class TestCase(unittest.TestCase):
pass
|
Make unit tests run on RHEL boxes better via the python-unittest2 library
|
## Code Before:
import unittest
class TestCase(unittest.TestCase):
pass
## Instruction:
Make unit tests run on RHEL boxes better via the python-unittest2 library
## Code After:
import platform
(major, minor, patch) = platform.python_version_tuple()
if int(major) == 2 and int(minor) < 7:
import unittest2 as unittest
else:
import unittest
class TestCase(unittest.TestCase):
pass
|
...
import platform
(major, minor, patch) = platform.python_version_tuple()
if int(major) == 2 and int(minor) < 7:
import unittest2 as unittest
else:
import unittest
...
|
4955e830d3130a6ae86d4a1c37db23777ee792d7
|
go_http/__init__.py
|
go_http/__init__.py
|
"""Vumi Go HTTP API client library."""
from .send import HttpApiSender, LoggingSender
__version__ = "0.3.1a0"
__all__ = [
'HttpApiSender', 'LoggingSender',
]
|
"""Vumi Go HTTP API client library."""
from .send import HttpApiSender, LoggingSender
from .account import AccountApiClient
__version__ = "0.3.1a0"
__all__ = [
'HttpApiSender', 'LoggingSender',
'AccountApiClient',
]
|
Add AccountApiClient to top-level package.
|
Add AccountApiClient to top-level package.
|
Python
|
bsd-3-clause
|
praekelt/go-http-api,praekelt/go-http-api
|
"""Vumi Go HTTP API client library."""
from .send import HttpApiSender, LoggingSender
+ from .account import AccountApiClient
__version__ = "0.3.1a0"
__all__ = [
'HttpApiSender', 'LoggingSender',
+ 'AccountApiClient',
]
|
Add AccountApiClient to top-level package.
|
## Code Before:
"""Vumi Go HTTP API client library."""
from .send import HttpApiSender, LoggingSender
__version__ = "0.3.1a0"
__all__ = [
'HttpApiSender', 'LoggingSender',
]
## Instruction:
Add AccountApiClient to top-level package.
## Code After:
"""Vumi Go HTTP API client library."""
from .send import HttpApiSender, LoggingSender
from .account import AccountApiClient
__version__ = "0.3.1a0"
__all__ = [
'HttpApiSender', 'LoggingSender',
'AccountApiClient',
]
|
// ... existing code ...
from .send import HttpApiSender, LoggingSender
from .account import AccountApiClient
// ... modified code ...
'HttpApiSender', 'LoggingSender',
'AccountApiClient',
]
// ... rest of the code ...
|
aee49d59b76400389ffa768950b479094059e385
|
linguist/tests/translations.py
|
linguist/tests/translations.py
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
Update test models for new metaclass support.
|
Update test models for new metaclass support.
|
Python
|
mit
|
ulule/django-linguist
|
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
- title = models.CharField(max_length=255, null=True, blank=True)
+ title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'foo',
-
- class FooTranslation(ModelTranslationBase):
- model = FooModel
- identifier = 'foo'
- fields = ('title', 'excerpt', 'body')
+ 'fields': ('title', 'excerpt', 'body'),
+ }
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
+ class Meta:
+ linguist = {
+ 'identifier': 'bar',
-
- class BarTranslation(ModelTranslationBase):
- model = BarModel
- identifier = 'bar'
- fields = ('title', )
+ 'fields': ('title', ),
+ }
class BadTranslation(object):
pass
class BadModel(object):
pass
|
Update test models for new metaclass support.
|
## Code Before:
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
## Instruction:
Update test models for new metaclass support.
## Code After:
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
// ... existing code ...
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
// ... modified code ...
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
...
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
// ... rest of the code ...
|
e1240aa33b286ba52507128458fc6d6b3b68dfb3
|
statsmodels/stats/multicomp.py
|
statsmodels/stats/multicomp.py
|
from statsmodels.sandbox.stats.multicomp import MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
Put back an import that my IDE incorrectly flagged as unused
|
Put back an import that my IDE incorrectly flagged as unused
|
Python
|
bsd-3-clause
|
gef756/statsmodels,detrout/debian-statsmodels,detrout/debian-statsmodels,bzero/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,hlin117/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,hainm/statsmodels,musically-ut/statsmodels,gef756/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,waynenilsen/statsmodels,bzero/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,alekz112/statsmodels,adammenges/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,yl565/statsmodels,phobson/statsmodels,alekz112/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,saketkc/statsmodels,hainm/statsmodels,hlin117/statsmodels,kiyoto/statsmodels,YihaoLu/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,gef756/statsmodels,phobson/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,astocko/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,bzero/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,hlin117/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,wwf5067/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,hainm/statsmodels,bert9bert/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,bzero/statsmodels,kiyoto/statsmodels,wwf5067/statsmodels,nvoron23/statsmodels,statsmodels/statsmod
els,YihaoLu/statsmodels,bashtage/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,nvoron23/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,astocko/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,bzero/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,hlin117/statsmodels,alekz112/statsmodels,astocko/statsmodels,musically-ut/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,yl565/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,astocko/statsmodels,bashtage/statsmodels,phobson/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wkfwkf/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,gef756/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,bsipocz/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels
|
- from statsmodels.sandbox.stats.multicomp import MultiComparison
+ from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
Put back an import that my IDE incorrectly flagged as unused
|
## Code Before:
from statsmodels.sandbox.stats.multicomp import MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
## Instruction:
Put back an import that my IDE incorrectly flagged as unused
## Code After:
from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
# ... existing code ...
from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
# ... rest of the code ...
|
c424744af801241fbdced0e3344c1f9b6f2c6416
|
citenet/cli.py
|
citenet/cli.py
|
"""Command-line interface for citenet."""
def main():
"""Run the CLI."""
print('CiteNet CLI')
|
"""Command-line interface for citenet."""
import sys
import json
import citenet
def main():
"""Run the CLI."""
if len(sys.argv) != 2:
print("Usage: {} <config_file>")
with open(sys.argv[1]) as config_file:
config = json.load(config_file)
graph = citenet.read_csv_graph(**config['graph'])
|
Add rudimentary JSON configuration to CLI
|
Add rudimentary JSON configuration to CLI
|
Python
|
mit
|
Pringley/citenet
|
"""Command-line interface for citenet."""
+
+ import sys
+ import json
+ import citenet
def main():
"""Run the CLI."""
- print('CiteNet CLI')
+ if len(sys.argv) != 2:
+ print("Usage: {} <config_file>")
+ with open(sys.argv[1]) as config_file:
+ config = json.load(config_file)
+
+ graph = citenet.read_csv_graph(**config['graph'])
+
|
Add rudimentary JSON configuration to CLI
|
## Code Before:
"""Command-line interface for citenet."""
def main():
"""Run the CLI."""
print('CiteNet CLI')
## Instruction:
Add rudimentary JSON configuration to CLI
## Code After:
"""Command-line interface for citenet."""
import sys
import json
import citenet
def main():
"""Run the CLI."""
if len(sys.argv) != 2:
print("Usage: {} <config_file>")
with open(sys.argv[1]) as config_file:
config = json.load(config_file)
graph = citenet.read_csv_graph(**config['graph'])
|
...
"""Command-line interface for citenet."""
import sys
import json
import citenet
...
"""Run the CLI."""
if len(sys.argv) != 2:
print("Usage: {} <config_file>")
with open(sys.argv[1]) as config_file:
config = json.load(config_file)
graph = citenet.read_csv_graph(**config['graph'])
...
|
0b5a657339870c7669082c39f8290c88732aa92e
|
extractor.py
|
extractor.py
|
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
if __name__ == '__main__':
runner = ExtractionRunner()
runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
def get_extraction_runner():
runner = ExtractionRunner()
runner.add_runnable(grobid.GrobidPlainTextExtractor)
# OR
# runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
return runner
if __name__ == '__main__':
runner = get_extraction_runner()
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
Make code a little cleaner
|
Make code a little cleaner
|
Python
|
apache-2.0
|
Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,SeerLabs/new-csx-extractor
|
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
- if __name__ == '__main__':
+ def get_extraction_runner():
runner = ExtractionRunner()
+
+ runner.add_runnable(grobid.GrobidPlainTextExtractor)
+ # OR
- runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
+ # runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
+
runner.add_runnable(filters.AcademicPaperFilter)
+
+ return runner
+
+
+ if __name__ == '__main__':
+ runner = get_extraction_runner()
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
Make code a little cleaner
|
## Code Before:
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
if __name__ == '__main__':
runner = ExtractionRunner()
runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
## Instruction:
Make code a little cleaner
## Code After:
from extraction.core import ExtractionRunner
from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult
import os
import sys
import grobid
import pdfbox
import filters
def get_extraction_runner():
runner = ExtractionRunner()
runner.add_runnable(grobid.GrobidPlainTextExtractor)
# OR
# runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
return runner
if __name__ == '__main__':
runner = get_extraction_runner()
argc = len(sys.argv)
if argc == 2:
runner.run_from_file(sys.argv[1])
elif argc == 3:
runner.run_from_file(sys.argv[1], output_dir = sys.argv[2])
else:
print("USAGE: python {0} path_to_pdf [output_directory]")
|
# ... existing code ...
def get_extraction_runner():
# ... modified code ...
runner = ExtractionRunner()
runner.add_runnable(grobid.GrobidPlainTextExtractor)
# OR
# runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor)
runner.add_runnable(filters.AcademicPaperFilter)
return runner
if __name__ == '__main__':
runner = get_extraction_runner()
# ... rest of the code ...
|
d2d822a9fb60bbc8ded7f9e3c70d91cf25f794b2
|
src/volunteers/models.py
|
src/volunteers/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
first_name = models.CharField(_('First name'), max_length=100)
last_name = models.CharField(_('Last name'), max_length=100)
age = models.PositiveIntegerField(_('Age'))
phone = models.CharField(_('Phone'), max_length=100)
email = models.EmailField(_('E-mail'), unique=True)
is_group = models.BooleanField(_('Is group representative'), default=False)
group_name = models.CharField(_('Group/organization name'), max_length=100,
blank=True)
participant_count = models.PositiveIntegerField(_('Participant count'),
default=1, validators=[MinValueValidator(1)])
class Meta:
verbose_name = _('Volunteer')
verbose_name_plural = _('Volunteers')
@property
def name(self):
template = u'{first_name} {last_name}'
if self.is_group:
template += u' (grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
def __unicode__(self):
return self.name
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
first_name = models.CharField(_('First name'), max_length=100)
last_name = models.CharField(_('Last name'), max_length=100)
age = models.PositiveIntegerField(_('Age'))
phone = models.CharField(_('Phone'), max_length=100)
email = models.EmailField(_('E-mail'), unique=True)
is_group = models.BooleanField(_('Is group representative'), default=False)
group_name = models.CharField(_('Group/organization name'), max_length=100,
blank=True)
participant_count = models.PositiveIntegerField(_('Participant count'),
default=1, validators=[MinValueValidator(1)])
class Meta:
verbose_name = _('Volunteer')
verbose_name_plural = _('Volunteers')
@property
def name(self):
template = u'{first_name} {last_name}'
if self.is_group:
template += u' ({group_name} grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
def __unicode__(self):
return self.name
|
Add group name to volunteer string representation
|
Add group name to volunteer string representation
|
Python
|
mit
|
mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
first_name = models.CharField(_('First name'), max_length=100)
last_name = models.CharField(_('Last name'), max_length=100)
age = models.PositiveIntegerField(_('Age'))
phone = models.CharField(_('Phone'), max_length=100)
email = models.EmailField(_('E-mail'), unique=True)
is_group = models.BooleanField(_('Is group representative'), default=False)
group_name = models.CharField(_('Group/organization name'), max_length=100,
blank=True)
participant_count = models.PositiveIntegerField(_('Participant count'),
default=1, validators=[MinValueValidator(1)])
class Meta:
verbose_name = _('Volunteer')
verbose_name_plural = _('Volunteers')
@property
def name(self):
template = u'{first_name} {last_name}'
if self.is_group:
- template += u' (grupp, {participant_count} osalejat)'
+ template += u' ({group_name} grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
def __unicode__(self):
return self.name
|
Add group name to volunteer string representation
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
first_name = models.CharField(_('First name'), max_length=100)
last_name = models.CharField(_('Last name'), max_length=100)
age = models.PositiveIntegerField(_('Age'))
phone = models.CharField(_('Phone'), max_length=100)
email = models.EmailField(_('E-mail'), unique=True)
is_group = models.BooleanField(_('Is group representative'), default=False)
group_name = models.CharField(_('Group/organization name'), max_length=100,
blank=True)
participant_count = models.PositiveIntegerField(_('Participant count'),
default=1, validators=[MinValueValidator(1)])
class Meta:
verbose_name = _('Volunteer')
verbose_name_plural = _('Volunteers')
@property
def name(self):
template = u'{first_name} {last_name}'
if self.is_group:
template += u' (grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
def __unicode__(self):
return self.name
## Instruction:
Add group name to volunteer string representation
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
first_name = models.CharField(_('First name'), max_length=100)
last_name = models.CharField(_('Last name'), max_length=100)
age = models.PositiveIntegerField(_('Age'))
phone = models.CharField(_('Phone'), max_length=100)
email = models.EmailField(_('E-mail'), unique=True)
is_group = models.BooleanField(_('Is group representative'), default=False)
group_name = models.CharField(_('Group/organization name'), max_length=100,
blank=True)
participant_count = models.PositiveIntegerField(_('Participant count'),
default=1, validators=[MinValueValidator(1)])
class Meta:
verbose_name = _('Volunteer')
verbose_name_plural = _('Volunteers')
@property
def name(self):
template = u'{first_name} {last_name}'
if self.is_group:
template += u' ({group_name} grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
def __unicode__(self):
return self.name
|
// ... existing code ...
if self.is_group:
template += u' ({group_name} grupp, {participant_count} osalejat)'
return template.format(**self.__dict__)
// ... rest of the code ...
|
41b241de6f2afa94b442007518d481526bfb66ae
|
linked-list/remove-k-from-list.py
|
linked-list/remove-k-from-list.py
|
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
|
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
|
Add initialization to linked list class
|
Add initialization to linked list class
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
+ class LinkedList(object):
+ def __init__(self, head=None):
+ self.head = head
+
|
Add initialization to linked list class
|
## Code Before:
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
## Instruction:
Add initialization to linked list class
## Code After:
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
|
...
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
...
|
71fd42a92b41529d9f5c784840ab4c190946adef
|
social_auth/backends/pipeline/associate.py
|
social_auth/backends/pipeline/associate.py
|
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
|
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
|
Remove spammy warning which doesn't apply when stores check emails
|
Remove spammy warning which doesn't apply when stores check emails
|
Python
|
bsd-3-clause
|
antoviaque/django-social-auth-norel
|
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
+ # Don't spam with a warning, this doesn't apply when providers check emails
- warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
+ #warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
|
Remove spammy warning which doesn't apply when stores check emails
|
## Code Before:
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
## Instruction:
Remove spammy warning which doesn't apply when stores check emails
## Code After:
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
|
// ... existing code ...
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
// ... rest of the code ...
|
ad934e49a43a8340af9d52bbac86bede45d0e84d
|
aero/adapters/brew.py
|
aero/adapters/brew.py
|
__author__ = 'nickl-'
from aero.__version__ import __version__
from .base import BaseAdapter
class Brew(BaseAdapter):
"""
Homebrew adapter.
"""
def search(self, query):
response = self.command(['search', query])[0]
if 'No formula found' not in response and 'Error:' not in response:
return dict([(
self.package_name(line),
'\n'.join(map(
lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1],
self.search_info(line)
))
) for line in response.splitlines() if line])
return {}
def search_info(self, query):
info = self.info(query)
return filter(
None,
[
info[0],
info[1] if len(info) > 1 else None,
info[2] if len(info) > 2 else None,
]
)
def info(self, query):
if '/' in query:
self.command(['tap', '/'.join(query.split('/')[:-1])])
response = self.command(['info', query])[0]
if 'Error:' not in response:
response = response.replace(query + ': ', 'version: ')
return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
return [['No info available']]
def install(self, query):
self.shell(['install', query])
return {}
|
__author__ = 'nickl-'
from aero.__version__ import __version__
from .base import BaseAdapter
class Brew(BaseAdapter):
"""
Homebrew adapter.
"""
def search(self, query):
response = self.command(['search', query])[0]
if 'No formula found' not in response and 'Error:' not in response:
return dict([(
self.package_name(line),
self.search_info(self.package_name(line))
) for line in response.splitlines() if line])
return {}
def search_info(self, query):
response = self._execute_command('aero', ['info', query], False)[0]
from re import split
lines = response.splitlines()
idx = lines.index(' ________________________________________ __________________________________________________ ')
return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]])
def info(self, query):
if '/' in query:
self.command(['tap', '/'.join(query.split('/')[:-1])])
response = self.command(['info', query])[0]
if 'Error:' not in response:
response = response.replace(query + ': ', 'version: ')
return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
return [['No info available']]
def install(self, query):
self.shell(['install', query])
return {}
|
Use aero info instead for caching info
|
Use aero info instead for caching info
Brew requires brew info for additional information. If we instead call aero info we can at least cache the info calls for later.
|
Python
|
bsd-3-clause
|
Aeronautics/aero
|
__author__ = 'nickl-'
from aero.__version__ import __version__
from .base import BaseAdapter
class Brew(BaseAdapter):
"""
Homebrew adapter.
"""
def search(self, query):
response = self.command(['search', query])[0]
if 'No formula found' not in response and 'Error:' not in response:
return dict([(
self.package_name(line),
- '\n'.join(map(
- lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1],
- self.search_info(line)
+ self.search_info(self.package_name(line))
- ))
) for line in response.splitlines() if line])
return {}
def search_info(self, query):
+ response = self._execute_command('aero', ['info', query], False)[0]
+ from re import split
+ lines = response.splitlines()
+ idx = lines.index(' ________________________________________ __________________________________________________ ')
+ return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]])
- info = self.info(query)
- return filter(
- None,
- [
- info[0],
- info[1] if len(info) > 1 else None,
- info[2] if len(info) > 2 else None,
- ]
- )
def info(self, query):
if '/' in query:
self.command(['tap', '/'.join(query.split('/')[:-1])])
response = self.command(['info', query])[0]
if 'Error:' not in response:
response = response.replace(query + ': ', 'version: ')
return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
return [['No info available']]
def install(self, query):
self.shell(['install', query])
return {}
|
Use aero info instead for caching info
|
## Code Before:
__author__ = 'nickl-'
from aero.__version__ import __version__
from .base import BaseAdapter
class Brew(BaseAdapter):
"""
Homebrew adapter.
"""
def search(self, query):
response = self.command(['search', query])[0]
if 'No formula found' not in response and 'Error:' not in response:
return dict([(
self.package_name(line),
'\n'.join(map(
lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1],
self.search_info(line)
))
) for line in response.splitlines() if line])
return {}
def search_info(self, query):
info = self.info(query)
return filter(
None,
[
info[0],
info[1] if len(info) > 1 else None,
info[2] if len(info) > 2 else None,
]
)
def info(self, query):
if '/' in query:
self.command(['tap', '/'.join(query.split('/')[:-1])])
response = self.command(['info', query])[0]
if 'Error:' not in response:
response = response.replace(query + ': ', 'version: ')
return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
return [['No info available']]
def install(self, query):
self.shell(['install', query])
return {}
## Instruction:
Use aero info instead for caching info
## Code After:
__author__ = 'nickl-'
from aero.__version__ import __version__
from .base import BaseAdapter
class Brew(BaseAdapter):
"""
Homebrew adapter.
"""
def search(self, query):
response = self.command(['search', query])[0]
if 'No formula found' not in response and 'Error:' not in response:
return dict([(
self.package_name(line),
self.search_info(self.package_name(line))
) for line in response.splitlines() if line])
return {}
def search_info(self, query):
response = self._execute_command('aero', ['info', query], False)[0]
from re import split
lines = response.splitlines()
idx = lines.index(' ________________________________________ __________________________________________________ ')
return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]])
def info(self, query):
if '/' in query:
self.command(['tap', '/'.join(query.split('/')[:-1])])
response = self.command(['info', query])[0]
if 'Error:' not in response:
response = response.replace(query + ': ', 'version: ')
return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line]
return [['No info available']]
def install(self, query):
self.shell(['install', query])
return {}
|
# ... existing code ...
self.package_name(line),
self.search_info(self.package_name(line))
) for line in response.splitlines() if line])
# ... modified code ...
def search_info(self, query):
response = self._execute_command('aero', ['info', query], False)[0]
from re import split
lines = response.splitlines()
idx = lines.index(' ________________________________________ __________________________________________________ ')
return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]])
# ... rest of the code ...
|
437623aee55fd68683126bd6852df52379837eaa
|
bash_command.py
|
bash_command.py
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, _ = run_bash_for_output(final_command)
print(final_command, " ", output)
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": output})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
Print both output + error for bash command
|
Print both output + error for bash command
|
Python
|
mit
|
ktuan89/sublimeplugins
|
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
- output, _ = run_bash_for_output(final_command)
+ output, err = run_bash_for_output(final_command)
- print(final_command, " ", output)
+ new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
- results_view.run_command('replace_content', {"new_content": output})
+ results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
Print both output + error for bash command
|
## Code Before:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, _ = run_bash_for_output(final_command)
print(final_command, " ", output)
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": output})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
## Instruction:
Print both output + error for bash command
## Code After:
import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
def run(self):
global last_command
window = self.window
view = window.active_view()
if view.file_name() is not None:
path = os.path.join(os.path.dirname(view.file_name()), '')
window.show_input_panel(
'Bash:',
last_command,
lambda command: (
self.run_bash(path, command)
),
None,
None
)
def run_bash(self, path, command):
global last_command
last_command = command
if command.startswith('$'):
command = command[1:]
path = git_path_for_window(self.window)
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
results_view = self.window.new_file()
results_view.set_scratch(True)
results_view.set_name("BashOutput")
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
results_view.sel().add(sublime.Region(0, 0))
self.window.focus_view(results_view)
|
...
final_command = "cd '{0}'; {1}".format(path, command)
output, err = run_bash_for_output(final_command)
new_content = output + '\n' + (100 * '=') + '\n' + err
...
# deps: this is from utilities.py
results_view.run_command('replace_content', {"new_content": new_content})
results_view.sel().clear()
...
|
c045dc59bc313055eb74513e2961ce2cbae87450
|
corehq/apps/api/util.py
|
corehq/apps/api/util.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
doc = cls.get(doc_id)
if doc and doc.domain == domain and doc.doc_type in additional_doc_types:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
|
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
doc_json = cls.get_db().get(doc_id)
if doc_json['doc_type'] not in additional_doc_types:
raise ResourceNotFound
doc = cls.wrap(doc_json)
if doc and doc.domain == domain:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
|
Check doc type before wrapping
|
Check doc type before wrapping
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
|
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
- doc = cls.get(doc_id)
+ doc_json = cls.get_db().get(doc_id)
- if doc and doc.domain == domain and doc.doc_type in additional_doc_types:
+ if doc_json['doc_type'] not in additional_doc_types:
+ raise ResourceNotFound
+ doc = cls.wrap(doc_json)
+ if doc and doc.domain == domain:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
|
Check doc type before wrapping
|
## Code Before:
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
doc = cls.get(doc_id)
if doc and doc.domain == domain and doc.doc_type in additional_doc_types:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
## Instruction:
Check doc type before wrapping
## Code After:
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
doc_json = cls.get_db().get(doc_id)
if doc_json['doc_type'] not in additional_doc_types:
raise ResourceNotFound
doc = cls.wrap(doc_json)
if doc and doc.domain == domain:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
|
# ... existing code ...
try:
doc_json = cls.get_db().get(doc_id)
if doc_json['doc_type'] not in additional_doc_types:
raise ResourceNotFound
doc = cls.wrap(doc_json)
if doc and doc.domain == domain:
return doc
# ... rest of the code ...
|
57bc8b3c40bbafda6f69b23c230ad73750e881ab
|
hashable/helpers.py
|
hashable/helpers.py
|
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equality_comparable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equality_comparable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equality_comparable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equalable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equalable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equalable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
Rename decorator equality_comparable to equalable
|
Rename decorator equality_comparable to equalable
|
Python
|
mit
|
minmax/hashable
|
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
- 'equality_comparable',
+ 'equalable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
- cls = equality_comparable(cls, attributes, methods)
+ cls = equalable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
- def equality_comparable(cls=None, attributes=None, methods=None):
+ def equalable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
Rename decorator equality_comparable to equalable
|
## Code Before:
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equality_comparable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equality_comparable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equality_comparable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
## Instruction:
Rename decorator equality_comparable to equalable
## Code After:
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equalable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equalable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equalable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
...
'hashable',
'equalable',
]
...
def decorator(cls):
cls = equalable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
...
def equalable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
...
|
a600543515c286ed7bcba2bad5a0746588b62f9a
|
app/views.py
|
app/views.py
|
import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
|
import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
|
Modify function that calculate the expected signature
|
Modify function that calculate the expected signature
|
Python
|
mit
|
rebearteta/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,joausaga/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,joausaga/social-ideation
|
import logging
import hashlib
+ import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
+
+ def _valid_request(app_secret, req_signature, payload):
+ exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
+ return exp_signature == req_signature
+
+
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
+ if _valid_request(fb_app.app_secret,req_signature,request.body):
- logger.info(req_signature)
- exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
- logger.info(exp_signature)
- req_json = json.loads(request.body)
+ req_json = json.loads(request.body)
- if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
|
Modify function that calculate the expected signature
|
## Code Before:
import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
## Instruction:
Modify function that calculate the expected signature
## Code After:
import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
|
...
import hashlib
import hmac
import json
...
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
...
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
...
|
666b011ef95ef6e82e59cc134b52fb29443ff9d8
|
iroha_cli/crypto.py
|
iroha_cli/crypto.py
|
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
|
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
def raw_public_key(self):
return base64.b64decode(self.public_key)
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
|
Add get raw key from KeyPair
|
Add get raw key from KeyPair
|
Python
|
apache-2.0
|
MizukiSonoko/iroha-cli,MizukiSonoko/iroha-cli
|
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
+ def raw_public_key(self):
+ return base64.b64decode(self.public_key)
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
|
Add get raw key from KeyPair
|
## Code Before:
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
## Instruction:
Add get raw key from KeyPair
## Code After:
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
def raw_public_key(self):
return base64.b64decode(self.public_key)
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
|
// ... existing code ...
def raw_public_key(self):
return base64.b64decode(self.public_key)
// ... rest of the code ...
|
8e2a42369228f3d19b046a610c93de4bec06d5bf
|
avocado/core/structures.py
|
avocado/core/structures.py
|
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
|
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
REPR_OUTPUT_SIZE = 20
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
return repr(tuple(data))
|
Add __repr__ to ChoicesDict structure
|
Add __repr__ to ChoicesDict structure
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
+
+
+ REPR_OUTPUT_SIZE = 20
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
+ def __repr__(self):
+ data = list(self[:REPR_OUTPUT_SIZE + 1])
+
+ if len(data) > REPR_OUTPUT_SIZE:
+ data[-1] = '...(remaining elements truncated)...'
+
+ return repr(tuple(data))
+
|
Add __repr__ to ChoicesDict structure
|
## Code Before:
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
## Instruction:
Add __repr__ to ChoicesDict structure
## Code After:
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
REPR_OUTPUT_SIZE = 20
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
return repr(tuple(data))
|
// ... existing code ...
from ordereddict import OrderedDict
REPR_OUTPUT_SIZE = 20
// ... modified code ...
yield key, self[key]
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
return repr(tuple(data))
// ... rest of the code ...
|
2046d82addab9ec83dbb85a2d08c727a52065d8b
|
deckglue/models.py
|
deckglue/models.py
|
from django.db import models
# Create your models here.
|
from django.contrib.auth.models import Permission
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from cardbox.card_model import Card
from cardbox.deck_model import Deck
from guardian.shortcuts import assign_perm, get_users_with_perms
from guardian.models import UserObjectPermission
from memorize.models import Practice
from django.contrib.auth.models import User
@receiver(post_save, sender=UserObjectPermission)
def create_practice_objects_for_new_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
@receiver(pre_delete, sender=UserObjectPermission)
def delete_practice_objects_for_removed_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
@receiver(post_save, sender=Card)
def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
"""Creates practice objects for all users with permission to view the card.
"""
perm_users = get_users_with_perms(kwargs['instance'].deck)
for user in perm_users:
practice = Practice(item = kwargs['instance'], user = user)
if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
practice.save()
@receiver(pre_delete, sender=Card)
def delete_practice_objects(sender, **kwargs):
"""Deletes all practice objects for a card once it is deleted.
"""
Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
|
Add signal hooks to create practice objects
|
Add signal hooks to create practice objects
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
- from django.db import models
+ from django.contrib.auth.models import Permission
+ from django.db.models.signals import post_save, pre_delete
+ from django.dispatch import receiver
+ from cardbox.card_model import Card
+ from cardbox.deck_model import Deck
+ from guardian.shortcuts import assign_perm, get_users_with_perms
+ from guardian.models import UserObjectPermission
+ from memorize.models import Practice
+ from django.contrib.auth.models import User
- # Create your models here.
+ @receiver(post_save, sender=UserObjectPermission)
+ def create_practice_objects_for_new_viewers(sender, **kwargs):
+ if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
+ for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
+ Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
+ @receiver(pre_delete, sender=UserObjectPermission)
+ def delete_practice_objects_for_removed_viewers(sender, **kwargs):
+ if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
+ for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
+ Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
+
+ @receiver(post_save, sender=Card)
+ def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
+ """Creates practice objects for all users with permission to view the card.
+
+ """
+ perm_users = get_users_with_perms(kwargs['instance'].deck)
+ for user in perm_users:
+ practice = Practice(item = kwargs['instance'], user = user)
+ if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
+ practice.save()
+
+
+
+ @receiver(pre_delete, sender=Card)
+ def delete_practice_objects(sender, **kwargs):
+ """Deletes all practice objects for a card once it is deleted.
+
+ """
+ Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
+
+
|
Add signal hooks to create practice objects
|
## Code Before:
from django.db import models
# Create your models here.
## Instruction:
Add signal hooks to create practice objects
## Code After:
from django.contrib.auth.models import Permission
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from cardbox.card_model import Card
from cardbox.deck_model import Deck
from guardian.shortcuts import assign_perm, get_users_with_perms
from guardian.models import UserObjectPermission
from memorize.models import Practice
from django.contrib.auth.models import User
@receiver(post_save, sender=UserObjectPermission)
def create_practice_objects_for_new_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
@receiver(pre_delete, sender=UserObjectPermission)
def delete_practice_objects_for_removed_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
@receiver(post_save, sender=Card)
def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
"""Creates practice objects for all users with permission to view the card.
"""
perm_users = get_users_with_perms(kwargs['instance'].deck)
for user in perm_users:
practice = Practice(item = kwargs['instance'], user = user)
if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
practice.save()
@receiver(pre_delete, sender=Card)
def delete_practice_objects(sender, **kwargs):
"""Deletes all practice objects for a card once it is deleted.
"""
Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
|
// ... existing code ...
from django.contrib.auth.models import Permission
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from cardbox.card_model import Card
from cardbox.deck_model import Deck
from guardian.shortcuts import assign_perm, get_users_with_perms
from guardian.models import UserObjectPermission
from memorize.models import Practice
from django.contrib.auth.models import User
@receiver(post_save, sender=UserObjectPermission)
def create_practice_objects_for_new_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
@receiver(pre_delete, sender=UserObjectPermission)
def delete_practice_objects_for_removed_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
@receiver(post_save, sender=Card)
def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
"""Creates practice objects for all users with permission to view the card.
"""
perm_users = get_users_with_perms(kwargs['instance'].deck)
for user in perm_users:
practice = Practice(item = kwargs['instance'], user = user)
if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
practice.save()
@receiver(pre_delete, sender=Card)
def delete_practice_objects(sender, **kwargs):
"""Deletes all practice objects for a card once it is deleted.
"""
Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
// ... rest of the code ...
|
0a336447546442ab5d48716223713135a4812adf
|
get_problem.py
|
get_problem.py
|
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
if __name__ == '__main__':
main()
|
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
print("'''")
print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
print("'''")
if __name__ == '__main__':
main()
|
ADD comment for python file
|
ADD comment for python file
|
Python
|
mit
|
byung-u/ProjectEuler
|
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
+ print("'''")
+ print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
-
+ print("'''")
if __name__ == '__main__':
main()
|
ADD comment for python file
|
## Code Before:
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
if __name__ == '__main__':
main()
## Instruction:
ADD comment for python file
## Code After:
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
print("'''")
print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
print("'''")
if __name__ == '__main__':
main()
|
# ... existing code ...
soup = BeautifulSoup(r.text, 'html.parser')
print("'''")
print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
# ... modified code ...
print(content.text)
print("'''")
# ... rest of the code ...
|
7e00b8a4436ee4bdad4d248a29985b1cef741a53
|
nimbus/apps/media/utils.py
|
nimbus/apps/media/utils.py
|
def bsd_rand(seed):
return (1103515245 * seed + 12345) & 0x7fffffff
def baseconv(v1, a1, a2):
n1 = {c: i for i, c in dict(enumerate(a1)).items()}
b1 = len(a1)
b2 = len(a2)
d1 = 0
for i, c in enumerate(v1):
d1 += n1[c] * pow(b1, b1 - i - 1)
v2 = ""
while d1:
v2 = a2[d1 % b2] + v2
d1 //= b2
return v2
def url_hash_from_pk(pk):
b10 = "0123456789"
b62 = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return baseconv(str(bsd_rand(pk)), b10, b62)
|
from nimbus.settings import SECRET_KEY
import hashlib
def baseconv(v1, a1, a2):
n1 = {c: i for i, c in enumerate(a1)}
b1 = len(a1)
b2 = len(a2)
d1 = 0
for i, c in enumerate(v1):
d1 += n1[c] * pow(b1, len(v1) - i - 1)
v2 = ""
while d1:
v2 = a2[d1 % b2] + v2
d1 //= b2
return v2
m = hashlib.md5()
m.update(SECRET_KEY)
c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789"))
c = c - (c % 2) + 1
def lcg(seed):
return (1103515245 * seed + c) & 0x7fffffff
def url_hash_from_pk(pk):
b10 = "0123456789"
b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
return baseconv(str(lcg(pk)), b10, b62)
|
Patch bug and security vulnerability
|
Patch bug and security vulnerability
|
Python
|
mit
|
ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus
|
- def bsd_rand(seed):
- return (1103515245 * seed + 12345) & 0x7fffffff
+ from nimbus.settings import SECRET_KEY
+ import hashlib
def baseconv(v1, a1, a2):
- n1 = {c: i for i, c in dict(enumerate(a1)).items()}
+ n1 = {c: i for i, c in enumerate(a1)}
b1 = len(a1)
b2 = len(a2)
d1 = 0
for i, c in enumerate(v1):
- d1 += n1[c] * pow(b1, b1 - i - 1)
+ d1 += n1[c] * pow(b1, len(v1) - i - 1)
v2 = ""
while d1:
v2 = a2[d1 % b2] + v2
d1 //= b2
return v2
+ m = hashlib.md5()
+ m.update(SECRET_KEY)
+ c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789"))
+ c = c - (c % 2) + 1
+
+
+ def lcg(seed):
+ return (1103515245 * seed + c) & 0x7fffffff
+
+
def url_hash_from_pk(pk):
b10 = "0123456789"
- b62 = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
- return baseconv(str(bsd_rand(pk)), b10, b62)
+ return baseconv(str(lcg(pk)), b10, b62)
|
Patch bug and security vulnerability
|
## Code Before:
def bsd_rand(seed):
return (1103515245 * seed + 12345) & 0x7fffffff
def baseconv(v1, a1, a2):
n1 = {c: i for i, c in dict(enumerate(a1)).items()}
b1 = len(a1)
b2 = len(a2)
d1 = 0
for i, c in enumerate(v1):
d1 += n1[c] * pow(b1, b1 - i - 1)
v2 = ""
while d1:
v2 = a2[d1 % b2] + v2
d1 //= b2
return v2
def url_hash_from_pk(pk):
b10 = "0123456789"
b62 = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
return baseconv(str(bsd_rand(pk)), b10, b62)
## Instruction:
Patch bug and security vulnerability
## Code After:
from nimbus.settings import SECRET_KEY
import hashlib
def baseconv(v1, a1, a2):
n1 = {c: i for i, c in enumerate(a1)}
b1 = len(a1)
b2 = len(a2)
d1 = 0
for i, c in enumerate(v1):
d1 += n1[c] * pow(b1, len(v1) - i - 1)
v2 = ""
while d1:
v2 = a2[d1 % b2] + v2
d1 //= b2
return v2
m = hashlib.md5()
m.update(SECRET_KEY)
c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789"))
c = c - (c % 2) + 1
def lcg(seed):
return (1103515245 * seed + c) & 0x7fffffff
def url_hash_from_pk(pk):
b10 = "0123456789"
b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
return baseconv(str(lcg(pk)), b10, b62)
|
// ... existing code ...
from nimbus.settings import SECRET_KEY
import hashlib
// ... modified code ...
def baseconv(v1, a1, a2):
n1 = {c: i for i, c in enumerate(a1)}
b1 = len(a1)
...
for i, c in enumerate(v1):
d1 += n1[c] * pow(b1, len(v1) - i - 1)
...
m = hashlib.md5()
m.update(SECRET_KEY)
c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789"))
c = c - (c % 2) + 1
def lcg(seed):
return (1103515245 * seed + c) & 0x7fffffff
def url_hash_from_pk(pk):
...
b10 = "0123456789"
b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
return baseconv(str(lcg(pk)), b10, b62)
// ... rest of the code ...
|
ffd4c52155acd7d04939e766ebe63171b580a2fa
|
src/__init__.py
|
src/__init__.py
|
import os
import logging
from kaa.base import ipc
from client import *
from server import *
__all__ = [ 'connect' ]
# connected client object
_client = None
def connect(epgdb, logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
"""
global _client
# get server filename
server = os.path.join(os.path.dirname(__file__), 'server.py')
if epgdb.find(':') >= 0:
# epg is remote: host:port
# TODO: create socket, pass it to client
_client = GuideClient("epg")
else:
# epg is local
_client = ipc.launch([server, logfile, str(loglevel), epgdb], 2, GuideClient, "epg")
return _client
|
import os
import logging
from socket import gethostbyname, gethostname
from kaa.base import ipc
from client import *
from server import *
__all__ = [ 'connect', 'DEFAULT_EPG_PORT', 'GuideClient', 'GuideServer' ]
# connected client object
_client = None
def connect(epgdb, address='127.0.0.1', logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
"""
global _client
if _client:
return _client
if address.split(':')[0] not in ['127.0.0.1', '0.0.0.0'] and \
address != gethostbyname(gethostname()):
# epg is remote: host:port
if address.find(':') >= 0:
host, port = address.split(':', 1)
else:
host = address
port = DEFAULT_EPG_PORT
# create socket, pass it to client
_client = GuideClient((host, port))
else:
# EPG is local, only use unix socket
# get server filename
server = os.path.join(os.path.dirname(__file__), 'server.py')
_client = ipc.launch([server, logfile, str(loglevel), epgdb, address],
2, GuideClient, "epg")
return _client
|
Add the ability to use inet socket as well.
|
Add the ability to use inet socket as well.
git-svn-id: ffaf500d3baede20d2f41eac1d275ef07405e077@1236 a8f5125c-1e01-0410-8897-facf34644b8e
|
Python
|
lgpl-2.1
|
freevo/kaa-epg
|
import os
import logging
+ from socket import gethostbyname, gethostname
from kaa.base import ipc
from client import *
from server import *
- __all__ = [ 'connect' ]
+ __all__ = [ 'connect', 'DEFAULT_EPG_PORT', 'GuideClient', 'GuideServer' ]
# connected client object
_client = None
- def connect(epgdb, logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
+ def connect(epgdb, address='127.0.0.1', logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
"""
global _client
- # get server filename
- server = os.path.join(os.path.dirname(__file__), 'server.py')
+ if _client:
+ return _client
- if epgdb.find(':') >= 0:
+ if address.split(':')[0] not in ['127.0.0.1', '0.0.0.0'] and \
+ address != gethostbyname(gethostname()):
# epg is remote: host:port
+ if address.find(':') >= 0:
+ host, port = address.split(':', 1)
+ else:
+ host = address
+ port = DEFAULT_EPG_PORT
+
- # TODO: create socket, pass it to client
+ # create socket, pass it to client
- _client = GuideClient("epg")
+ _client = GuideClient((host, port))
else:
- # epg is local
+ # EPG is local, only use unix socket
+
+ # get server filename
+ server = os.path.join(os.path.dirname(__file__), 'server.py')
+
- _client = ipc.launch([server, logfile, str(loglevel), epgdb], 2, GuideClient, "epg")
+ _client = ipc.launch([server, logfile, str(loglevel), epgdb, address],
+ 2, GuideClient, "epg")
+
return _client
|
Add the ability to use inet socket as well.
|
## Code Before:
import os
import logging
from kaa.base import ipc
from client import *
from server import *
__all__ = [ 'connect' ]
# connected client object
_client = None
def connect(epgdb, logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
"""
global _client
# get server filename
server = os.path.join(os.path.dirname(__file__), 'server.py')
if epgdb.find(':') >= 0:
# epg is remote: host:port
# TODO: create socket, pass it to client
_client = GuideClient("epg")
else:
# epg is local
_client = ipc.launch([server, logfile, str(loglevel), epgdb], 2, GuideClient, "epg")
return _client
## Instruction:
Add the ability to use inet socket as well.
## Code After:
import os
import logging
from socket import gethostbyname, gethostname
from kaa.base import ipc
from client import *
from server import *
__all__ = [ 'connect', 'DEFAULT_EPG_PORT', 'GuideClient', 'GuideServer' ]
# connected client object
_client = None
def connect(epgdb, address='127.0.0.1', logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
"""
global _client
if _client:
return _client
if address.split(':')[0] not in ['127.0.0.1', '0.0.0.0'] and \
address != gethostbyname(gethostname()):
# epg is remote: host:port
if address.find(':') >= 0:
host, port = address.split(':', 1)
else:
host = address
port = DEFAULT_EPG_PORT
# create socket, pass it to client
_client = GuideClient((host, port))
else:
# EPG is local, only use unix socket
# get server filename
server = os.path.join(os.path.dirname(__file__), 'server.py')
_client = ipc.launch([server, logfile, str(loglevel), epgdb, address],
2, GuideClient, "epg")
return _client
|
...
import logging
from socket import gethostbyname, gethostname
...
__all__ = [ 'connect', 'DEFAULT_EPG_PORT', 'GuideClient', 'GuideServer' ]
...
def connect(epgdb, address='127.0.0.1', logfile='/tmp/kaa-epg.log', loglevel=logging.INFO):
"""
...
if _client:
return _client
if address.split(':')[0] not in ['127.0.0.1', '0.0.0.0'] and \
address != gethostbyname(gethostname()):
# epg is remote: host:port
if address.find(':') >= 0:
host, port = address.split(':', 1)
else:
host = address
port = DEFAULT_EPG_PORT
# create socket, pass it to client
_client = GuideClient((host, port))
...
else:
# EPG is local, only use unix socket
# get server filename
server = os.path.join(os.path.dirname(__file__), 'server.py')
_client = ipc.launch([server, logfile, str(loglevel), epgdb, address],
2, GuideClient, "epg")
return _client
...
|
826f23f0fc7eea4c72dcc26f637f3752bee51b47
|
test/ctypesgentest.py
|
test/ctypesgentest.py
|
import optparse, sys, StringIO
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
import optparse, sys, StringIO
sys.path.append(".") # Allow tests to be called from parent directory with Python 2.6
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
Allow tests to be called from parent directory of "test"
|
Allow tests to be called from parent directory of "test"
git-svn-id: 397be6d5b34b040010577acc149a81bea378be26@89 1754e6c4-832e-0410-bb55-0fb906f63d99
|
Python
|
bsd-3-clause
|
kanzure/ctypesgen,kanzure/ctypesgen,novas0x2a/ctypesgen,davidjamesca/ctypesgen,kanzure/ctypesgen
|
import optparse, sys, StringIO
+ sys.path.append(".") # Allow tests to be called from parent directory with Python 2.6
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
Allow tests to be called from parent directory of "test"
|
## Code Before:
import optparse, sys, StringIO
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
## Instruction:
Allow tests to be called from parent directory of "test"
## Code After:
import optparse, sys, StringIO
sys.path.append(".") # Allow tests to be called from parent directory with Python 2.6
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
# ... existing code ...
import optparse, sys, StringIO
sys.path.append(".") # Allow tests to be called from parent directory with Python 2.6
sys.path.append("..")
# ... rest of the code ...
|
3de4665adae5f289fa896aa211ec32f72d956342
|
testproject/testproject/urls.py
|
testproject/testproject/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Remove use of deprecated patterns function
|
Remove use of deprecated patterns function
|
Python
|
mit
|
vittoriozamboni/django-groups-manager,vittoriozamboni/django-groups-manager
|
from django.conf import settings
- from django.conf.urls import patterns, include, url
+ from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
- urlpatterns = patterns('',
+ urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
- ) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
+ ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Remove use of deprecated patterns function
|
## Code Before:
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
## Instruction:
Remove use of deprecated patterns function
## Code After:
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from testproject import views
urlpatterns = [
# Examples:
# url(r'^$', 'testproject.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.TestView.as_view(), name='home'),
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
...
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
...
urlpatterns = [
# Examples:
...
url(r'^groups-manager/', include('groups_manager.urls', namespace='groups_manager')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
...
|
4375e1d72832f9672eaba87019be9b769eb69e78
|
alg_hash_string.py
|
alg_hash_string.py
|
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
|
Add importing absolute_import & division from Prague
|
Add importing absolute_import & division from Prague
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
+ from __future__ import absolute_import
+ from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
|
Add importing absolute_import & division from Prague
|
## Code Before:
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
## Instruction:
Add importing absolute_import & division from Prague
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
|
# ... existing code ...
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# ... rest of the code ...
|
f1e516e8002425f5f4f9904096848798b2bc97fa
|
jesusmtnez/python/kata/game.py
|
jesusmtnez/python/kata/game.py
|
class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
|
class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1 if pins < 10 else 2
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_strike(frame):
score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _is_strike(self, frame):
print(frame)
return self._rolls[frame] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
|
Add strikes support when rolling
|
[Python] Add strikes support when rolling
|
Python
|
mit
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
- self._current_roll += 1
+ self._current_roll += 1 if pins < 10 else 2
def score(self):
score = 0
for frame in range(0, 20, 2):
+ if self._is_strike(frame):
+ score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
- if self._is_spare(frame):
+ elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
+ def _is_strike(self, frame):
+ print(frame)
+ return self._rolls[frame] == 10
+
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
|
Add strikes support when rolling
|
## Code Before:
class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
## Instruction:
Add strikes support when rolling
## Code After:
class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1 if pins < 10 else 2
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_strike(frame):
score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _is_strike(self, frame):
print(frame)
return self._rolls[frame] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
|
...
self._rolls[self._current_roll] += pins
self._current_roll += 1 if pins < 10 else 2
...
for frame in range(0, 20, 2):
if self._is_strike(frame):
score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
...
def _is_strike(self, frame):
print(frame)
return self._rolls[frame] == 10
def _frame_score(self, frame):
...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.