commit (stringlengths 40-40) | old_file (stringlengths 4-106) | new_file (stringlengths 4-106) | old_contents (stringlengths 10-2.94k) | new_contents (stringlengths 21-2.95k) | subject (stringlengths 16-444) | message (stringlengths 17-2.63k) | lang (stringclasses 1 value) | license (stringclasses 13 values) | repos (stringlengths 7-43k) | ndiff (stringlengths 52-3.31k) | instruction (stringlengths 16-444) | content (stringlengths 133-4.32k) | diff (stringlengths 49-3.61k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7576d63bc2061074a41685c69546a4a5d57bc3fb
|
unihan_db/__about__.py
|
unihan_db/__about__.py
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__docs__ = 'https://unihan-db.git-pull.com'
__tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
Add docs and tracker to metadata
|
Add docs and tracker to metadata
|
Python
|
mit
|
cihai/unihan-db
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
+ __docs__ = 'https://unihan-db.git-pull.com'
+ __tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
Add docs and tracker to metadata
|
## Code Before:
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
## Instruction:
Add docs and tracker to metadata
## Code After:
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__docs__ = 'https://unihan-db.git-pull.com'
__tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
+ __docs__ = 'https://unihan-db.git-pull.com'
+ __tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
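The row above only adds two metadata fields; what makes such `__about__` modules useful is that packaging code can read them without importing the package. A minimal sketch of that common pattern (the consumer below is hypothetical, not part of the commit):

```python
# Hypothetical consumer of the __about__ module shown above: setup scripts
# often exec the file so metadata stays single-sourced without importing
# the package (which may have unmet dependencies at install time).
about = {}
with open('unihan_db/__about__.py') as f:
    exec(f.read(), about)

print(about['__title__'], about['__version__'])
print('Docs:', about['__docs__'], '| Tracker:', about['__tracker__'])
```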
2a12a7d2e2d06e64ca076563b8b68454e92fefae
|
service_fabfile.py
|
service_fabfile.py
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
    deploy_static(static_dir='../assets/', cmd='python manage.py')
|
Deploy static files during installation
|
Deploy static files during installation
|
Python
|
bsd-3-clause
|
CorbanU/corban-shopify,CorbanU/corban-shopify
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
+     deploy_static(static_dir='../assets/', cmd='python manage.py')
|
Deploy static files during installation
|
## Code Before:
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
## Instruction:
Deploy static files during installation
## Code After:
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
    deploy_static(static_dir='../assets/', cmd='python manage.py')
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
    """Perform pre-installation tasks for the service."""
    pass
def install(service=None):
    """Perform service specific post-installation tasks."""
    install_requirements()
    migrate_db(cmd='python manage.py')
+     deploy_static(static_dir='../assets/', cmd='python manage.py')
|
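One wrinkle in the commit above: `deploy_static` is called but never imported in `service_fabfile.py`, so it presumably arrives via the `from fabric.api import *` star import or is expected alongside the other `fabfile` helpers. A hedged sketch of the explicit-import variant (the import path is an assumption):

```python
# Assumption: deploy_static lives in the shared fabfile module, like
# install_requirements and migrate_db; the committed file relies on the
# name being available some other way.
from fabfile import deploy_static

deploy_static(static_dir='../assets/', cmd='python manage.py')
```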
691ccb9e99240f36ab954974e1ecbdea61c4c7b6
|
datagroupings/templatetags/key.py
|
datagroupings/templatetags/key.py
|
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
    if key_name in d:
        return d[key_name]
    return ''
@register.filter(name='value')
def value(d, key_name):
    if key_name in d:
        return d[key_name]
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
|
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
    if d is not None:
        if key_name in d:
            return d[key_name]
    return ''
@register.filter(name='value')
def value(d, key_name):
    if d is not None:
        if key_name in d:
            return d[key_name]
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
|
Fix TemplateTag issue with filters
|
Fix TemplateTag issue with filters
|
Python
|
apache-2.0
|
nagyistoce/geokey,nagyistoce/geokey,nagyistoce/geokey
|
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
+     if d is not None:
-     if key_name in d:
+         if key_name in d:
-         return d[key_name]
+             return d[key_name]
    return ''
@register.filter(name='value')
def value(d, key_name):
+     if d is not None:
-     if key_name in d:
+         if key_name in d:
-         return d[key_name]
+             return d[key_name]
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
|
Fix TemplateTag issue with filters
|
## Code Before:
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
    if key_name in d:
        return d[key_name]
    return ''
@register.filter(name='value')
def value(d, key_name):
    if key_name in d:
        return d[key_name]
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
## Instruction:
Fix TemplateTag issue with filters
## Code After:
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
    if d is not None:
        if key_name in d:
            return d[key_name]
    return ''
@register.filter(name='value')
def value(d, key_name):
    if d is not None:
        if key_name in d:
            return d[key_name]
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
|
import json
from django import template
register = template.Library()
@register.filter(name='key')
def key(d, key_name):
+     if d is not None:
-     if key_name in d:
+         if key_name in d:
? ++++
-         return d[key_name]
+             return d[key_name]
? ++++
    return ''
@register.filter(name='value')
def value(d, key_name):
+     if d is not None:
-     if key_name in d:
+         if key_name in d:
? ++++
-         return d[key_name]
+             return d[key_name]
? ++++
    return ''
@register.filter(name='minval')
def minval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            minval = json.loads(d.get(key_name)).get('minval')
            if minval is not None:
                return minval
    return ''
@register.filter(name='maxval')
def maxval(d, key_name):
    if d is not None:
        if d.get(key_name) is not None:
            maxval = json.loads(d.get(key_name)).get('maxval')
            if maxval is not None:
                return maxval
    return ''
|
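The fix above guards against `d` being `None` before testing membership. A minimal check, calling the filters as plain functions (registration does not change their call signature; the import path follows the file name in the row):

```python
from datagroupings.templatetags.key import key, value

# Old version: `key_name in None` raised
# TypeError: argument of type 'NoneType' is not iterable.
assert key(None, 'status') == ''
assert key({'status': 'active'}, 'status') == 'active'
assert value(None, 'status') == ''
```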
a03b166f8297783819a43eeb78e5af4d52d11bcc
|
carbonate/list.py
|
carbonate/list.py
|
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
    from os import scandir, walk
except ImportError:
    from scandir import scandir, walk
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
    for root, dirnames, filenames in walk(storage_dir,
                                          followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
|
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
    from os import scandir, walk  # noqa # pylint: disable=unused-import
except ImportError:
    from scandir import scandir, walk  # noqa # pylint: disable=unused-import
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
    for root, _, filenames in walk(storage_dir, followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
|
Make pylint happy as per graphite-web example
|
Make pylint happy as per graphite-web example
|
Python
|
mit
|
criteo-forks/carbonate,jssjr/carbonate,deniszh/carbonate,graphite-project/carbonate,jssjr/carbonate,graphite-project/carbonate,criteo-forks/carbonate,jssjr/carbonate,deniszh/carbonate,deniszh/carbonate,criteo-forks/carbonate,graphite-project/carbonate
|
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
-     from os import scandir, walk
+     from os import scandir, walk  # noqa # pylint: disable=unused-import
except ImportError:
-     from scandir import scandir, walk
+     from scandir import scandir, walk  # noqa # pylint: disable=unused-import
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
+     for root, _, filenames in walk(storage_dir, followlinks=follow_sym_links):
-     for root, dirnames, filenames in walk(storage_dir,
-                                           followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
|
Make pylint happy as per graphite-web example
|
## Code Before:
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
    from os import scandir, walk
except ImportError:
    from scandir import scandir, walk
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
    for root, dirnames, filenames in walk(storage_dir,
                                          followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
## Instruction:
Make pylint happy as per graphite-web example
## Code After:
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
    from os import scandir, walk  # noqa # pylint: disable=unused-import
except ImportError:
    from scandir import scandir, walk  # noqa # pylint: disable=unused-import
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
    for root, _, filenames in walk(storage_dir, followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
|
import os
import re
# Use the built-in version of scandir/walk if possible, otherwise
# use the scandir module version
try:
-     from os import scandir, walk
+     from os import scandir, walk  # noqa # pylint: disable=unused-import
except ImportError:
-     from scandir import scandir, walk
+     from scandir import scandir, walk  # noqa # pylint: disable=unused-import
def listMetrics(storage_dir, follow_sym_links=False, metric_suffix='wsp'):
    metric_regex = re.compile(".*\.%s$" % metric_suffix)
    storage_dir = storage_dir.rstrip(os.sep)
+     for root, _, filenames in walk(storage_dir, followlinks=follow_sym_links):
-     for root, dirnames, filenames in walk(storage_dir,
-                                           followlinks=follow_sym_links):
        for filename in filenames:
            if metric_regex.match(filename):
                root_path = root[len(storage_dir) + 1:]
                m_path = os.path.join(root_path, filename)
                m_name, m_ext = os.path.splitext(m_path)
                m_name = m_name.replace('/', '.')
                yield m_name
|
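For orientation, `listMetrics` walks the Whisper storage tree and converts relative file paths to dotted metric names. A sketch of the expected behaviour (the directory layout is illustrative):

```python
from carbonate.list import listMetrics

# Given a hypothetical file /opt/graphite/storage/whisper/servers/web01/cpu.wsp,
# the generator strips the storage root, drops the .wsp suffix and maps
# path separators to dots:
for name in listMetrics('/opt/graphite/storage/whisper'):
    print(name)  # -> "servers.web01.cpu"
```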
254702c1b5a76701a1437d6dc3d732ec27ebaa81
|
backslash/api_object.py
|
backslash/api_object.py
|
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
|
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
        return self
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
|
Make `refresh` return self for chaining actions
|
Make `refresh` return self for chaining actions
|
Python
|
bsd-3-clause
|
slash-testing/backslash-python,vmalloc/backslash-python
|
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
+         return self
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
|
Make `refresh` return self for chaining actions
|
## Code Before:
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
## Instruction:
Make `refresh` return self for chaining actions
## Code After:
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
        return self
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
|
class APIObject(object):
    def __init__(self, client, json_data):
        super(APIObject, self).__init__()
        self.client = client
        self._data = json_data
    def __eq__(self, other):
        if not isinstance(other, APIObject):
            return NotImplemented
        return self.client is other.client and self._data == other._data
    def __ne__(self, other):
        return not (self == other)
    def __getattr__(self, name):
        try:
            return self.__dict__['_data'][name]
        except KeyError:
            raise AttributeError(name)
    def refresh(self):
        self._data = self._fetch()
+         return self
    def _fetch(self):
        return self.client.api.get(self.api_path, raw=True)[self._data['type']]
    def __repr__(self):
        return '<API:{data[type]}:{data[id]}>'.format(data=self._data)
|
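Returning `self` from `refresh` is what enables the chaining mentioned in the commit message; a minimal sketch (the object and attribute names are hypothetical):

```python
# `session` stands in for any APIObject whose data exposes a `status` field.
session.refresh()                 # before: returned None, so a read needed a second statement
print(session.status)

print(session.refresh().status)   # after: fetch-then-read collapses into one expression
```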
f5bb9e5f388c4ac222da2318638266fdfbe925f0
|
beam/vendor.py
|
beam/vendor.py
|
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
self.endpoint = endpoint
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
|
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
""" The vendor's name, e.g. "RamNode". """
self.endpoint = endpoint
""" The hostname of the SolusVM control panel, with protocol. """
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
|
Add documentation to Vendor properties
|
Add documentation to Vendor properties
|
Python
|
mit
|
gebn/beam,gebn/beam
|
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
+ """ The vendor's name, e.g. "RamNode". """
self.endpoint = endpoint
+ """ The hostname of the SolusVM control panel, with protocol. """
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
|
Add documentation to Vendor properties
|
## Code Before:
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
self.endpoint = endpoint
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
## Instruction:
Add documentation to Vendor properties
## Code After:
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
""" The vendor's name, e.g. "RamNode". """
self.endpoint = endpoint
""" The hostname of the SolusVM control panel, with protocol. """
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
|
from __future__ import unicode_literals
import six
@six.python_2_unicode_compatible
class Vendor(object):
"""
Represents a VPS provider.
"""
def __init__(self, name, endpoint):
"""
Initialise a new vendor object.
:param name: The name of the vendor, e.g. "RamNode".
:param endpoint: The hostname of the SolusVM control panel, with
protocol.
"""
self.name = name
+ """ The vendor's name, e.g. "RamNode". """
self.endpoint = endpoint
+ """ The hostname of the SolusVM control panel, with protocol. """
def __hash__(self):
"""
Retrieve a hash value for this object.
:return: This object's hash. Identical objects will have an identical
hash.
"""
return hash(self.name)
def __eq__(self, other):
"""
Test whether this vendor is identical to another.
:param other: The object to compare to this one.
:return: True if the objects are identical, false otherwise.
"""
return isinstance(other, self.__class__) and other.name == self.name
def __str__(self):
"""
Generate a human-readable string representation of this vendor.
:return: This host as a friendly string.
"""
return '{0}({1}, {2})'.format(self.__class__.__name__,
self.name,
self.endpoint)
|
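The added strings are not runtime docstrings: Python evaluates and discards a bare string literal after an assignment. Documentation tools such as Sphinx autodoc, however, pick them up as attribute documentation, which is presumably the point of the commit. The convention in miniature (hypothetical class):

```python
class Example(object):
    def __init__(self):
        self.rate = 1.0
        """Requests per second. Ignored at runtime; read by doc tools."""
```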
8fce8e72f5ff40e51605f3b14bcde5006f4eaa71
|
molly/utils/i18n.py
|
molly/utils/i18n.py
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
    except Model.DoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
        except Model.DoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
    except ObjectDoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
        except ObjectDoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
Fix bug in exception handling
|
Fix bug in exception handling
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
+ from django.core.exceptions import ObjectDoesNotExist
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
-     except Model.DoesNotExist:
+     except ObjectDoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
-         except Model.DoesNotExist:
+         except ObjectDoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
Fix bug in exception handling
|
## Code Before:
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
    except Model.DoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
        except Model.DoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
## Instruction:
Fix bug in exception handling
## Code After:
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
    except ObjectDoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
        except ObjectDoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
+ from django.core.exceptions import ObjectDoesNotExist
try:
    from django.utils.translation import override
except ImportError:
    from django.utils.translation import activate, deactivate
    class override(object):
        def __init__(self, language, deactivate=False):
            self.language = language
            self.deactivate = deactivate
            self.old_language = get_language()
        def __enter__(self):
            activate(self.language)
        def __exit__(self, exc_type, exc_value, traceback):
            if self.deactivate:
                deactivate()
            else:
                activate(self.old_language)
def name_in_language(obj, field):
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
-     except Model.DoesNotExist:
?            ^^^ ^^
+     except ObjectDoesNotExist:
?            ^^^ ^^
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
-         except Model.DoesNotExist:
?                ^^^ ^^
+         except ObjectDoesNotExist:
?                ^^^ ^^
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
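The underlying bug: every concrete Django model gets its own `DoesNotExist` exception, and all of them subclass `ObjectDoesNotExist`, while the abstract base `Model` defines no such attribute, so the old `except Model.DoesNotExist:` clause itself fails when a lookup misses. A sketch with a hypothetical `Name` model:

```python
from django.core.exceptions import ObjectDoesNotExist

# Name is a hypothetical model standing in for the `names` relation above.
assert issubclass(Name.DoesNotExist, ObjectDoesNotExist)
try:
    Name.objects.get(language_code='xx')
except ObjectDoesNotExist:  # matches Name.DoesNotExist for any model
    pass
```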
b16d634ee9390864da8de6f8d4e7f9e546c8f772
|
ifs/source/jenkins.py
|
ifs/source/jenkins.py
|
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
apt-get update
apt-get install -y jenkins
"""
|
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
apt-get update -o Dir::Etc::sourcelist="sources.list.d/jenkins.list" -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0"
apt-get install -y jenkins
"""
|
Update Jenkins source to only update the Jenkins repository
|
Update Jenkins source to only update the Jenkins repository
|
Python
|
isc
|
cbednarski/ifs-python,cbednarski/ifs-python
|
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
- apt-get update
+ apt-get update -o Dir::Etc::sourcelist="sources.list.d/jenkins.list" -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0"
apt-get install -y jenkins
"""
|
Update Jenkins source to only update the Jenkins repository
|
## Code Before:
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
apt-get update
apt-get install -y jenkins
"""
## Instruction:
Update Jenkins source to only update the Jenkins repository
## Code After:
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
apt-get update -o Dir::Etc::sourcelist="sources.list.d/jenkins.list" -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0"
apt-get install -y jenkins
"""
|
version_cmd = 'java -jar /usr/share/jenkins/jenkins.war --version'
version_re = '(\d\.\d{3})'
depends = ['wget']
install_script = """
wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | apt-key add -
echo "deb http://pkg.jenkins-ci.org/debian binary/" > /etc/apt/sources.list.d/jenkins.list
- apt-get update
+ apt-get update -o Dir::Etc::sourcelist="sources.list.d/jenkins.list" -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0"
apt-get install -y jenkins
"""
|
c6ef5bcac4d5daddac97ff30ff18645249928ac0
|
nap/engine.py
|
nap/engine.py
|
import json
try:
    import msgpack
except ImportError:
    pass
from decimal import Decimal
from datetime import date, datetime, time
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
class MsgPackEngine(Engine):
    CONTENT_TYPES = ['application/x-msgpack',]
    def dumps(self, data):
        return msgpack.dumps(data)
    def loads(self, data):
        return msgpack.loads(data)
|
import json
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
try:
    import msgpack
except ImportError:
    pass
else:
    class MsgPackEngine(Engine):
        CONTENT_TYPES = ['application/x-msgpack',]
        def dumps(self, data):
            return msgpack.dumps(data)
        def loads(self, data):
            return msgpack.loads(data)
|
Remove unused imports Only define MsgPackEngine if we can import MsgPack
|
Remove unused imports
Only define MsgPackEngine if we can import MsgPack
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
import json
- try:
-     import msgpack
- except ImportError:
-     pass
- from decimal import Decimal
- from datetime import date, datetime, time
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
+
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
- class MsgPackEngine(Engine):
-     CONTENT_TYPES = ['application/x-msgpack',]
-     def dumps(self, data):
-         return msgpack.dumps(data)
-     def loads(self, data):
-         return msgpack.loads(data)
+ try:
+     import msgpack
+ except ImportError:
+     pass
+ else:
+     class MsgPackEngine(Engine):
+         CONTENT_TYPES = ['application/x-msgpack',]
+         def dumps(self, data):
+             return msgpack.dumps(data)
+         def loads(self, data):
+             return msgpack.loads(data)
+
|
Remove unused imports Only define MsgPackEngine if we can import MsgPack
|
## Code Before:
import json
try:
    import msgpack
except ImportError:
    pass
from decimal import Decimal
from datetime import date, datetime, time
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
class MsgPackEngine(Engine):
    CONTENT_TYPES = ['application/x-msgpack',]
    def dumps(self, data):
        return msgpack.dumps(data)
    def loads(self, data):
        return msgpack.loads(data)
## Instruction:
Remove unused imports Only define MsgPackEngine if we can import MsgPack
## Code After:
import json
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
try:
    import msgpack
except ImportError:
    pass
else:
    class MsgPackEngine(Engine):
        CONTENT_TYPES = ['application/x-msgpack',]
        def dumps(self, data):
            return msgpack.dumps(data)
        def loads(self, data):
            return msgpack.loads(data)
|
import json
- try:
-     import msgpack
- except ImportError:
-     pass
- from decimal import Decimal
- from datetime import date, datetime, time
class Engine(object):
    # The list of content types we match
    CONTENT_TYPES = []
    def dumps(self, data): # pragma: no cover
        '''How to serialiser an object'''
        raise NotImplementedError
    def loads(self, data): # pragma: no cover
        '''How to deserialise a string'''
        raise NotImplementedError
+
class JsonEngine(Engine):
    CONTENT_TYPES = ['application/json',]
    def dumps(self, data):
        return json.dumps(data)
    def loads(self, data):
        return json.loads(data)
+
+ try:
+     import msgpack
+ except ImportError:
+     pass
+ else:
- class MsgPackEngine(Engine):
+     class MsgPackEngine(Engine):
? ++++
-     CONTENT_TYPES = ['application/x-msgpack',]
+         CONTENT_TYPES = ['application/x-msgpack',]
? ++++
-     def dumps(self, data):
+         def dumps(self, data):
? ++++
-         return msgpack.dumps(data)
+             return msgpack.dumps(data)
? ++++
-     def loads(self, data):
+         def loads(self, data):
? ++++
-         return msgpack.loads(data)
+             return msgpack.loads(data)
? ++++
|
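Moving the import into a `try`/`except`/`else` is a standard optional-dependency pattern: the class only exists when the backend imports cleanly. Callers can feature-test the same way (a sketch; whether consumers actually do this is an assumption):

```python
from nap.engine import JsonEngine

try:
    # Defined only when msgpack imported cleanly, per the row above.
    from nap.engine import MsgPackEngine
    engine = MsgPackEngine()
except ImportError:
    engine = JsonEngine()  # always available
```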
a7c31950cc2ad737176ba2aa91c77c7c649aa8c7
|
shell.py
|
shell.py
|
import sys, os, subprocess
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
|
import sys, os, subprocess
remove_vars = ("PYTHONHOME", "PYTHONPATH", "VERSIONER_PYTHON_PREFER_32_BIT")
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
    for var in remove_vars:
        if var in env:
            del env[var]
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
|
Remove some Python environment variables from user subprocess environment.
|
Remove some Python environment variables from user subprocess environment.
|
Python
|
mit
|
shaurz/devo
|
import sys, os, subprocess
+
+ remove_vars = ("PYTHONHOME", "PYTHONPATH", "VERSIONER_PYTHON_PREFER_32_BIT")
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
+     for var in remove_vars:
+         if var in env:
+             del env[var]
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
|
Remove some Python environment variables from user subprocess environment.
|
## Code Before:
import sys, os, subprocess
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
## Instruction:
Remove some Python environment variables from user subprocess environment.
## Code After:
import sys, os, subprocess
remove_vars = ("PYTHONHOME", "PYTHONPATH", "VERSIONER_PYTHON_PREFER_32_BIT")
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
    for var in remove_vars:
        if var in env:
            del env[var]
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
|
import sys, os, subprocess
+
+ remove_vars = ("PYTHONHOME", "PYTHONPATH", "VERSIONER_PYTHON_PREFER_32_BIT")
def make_environment(env=None):
    if env is None:
        env = os.environ
    env = env.copy()
+     for var in remove_vars:
+         if var in env:
+             del env[var]
    env["PYTHONUNBUFFERED"] = "1"
    env["PYTHONIOENCODING"] = "UTF-8"
    return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
    if sys.platform == "win32":
        args = cmdline
    else:
        args = [os.environ.get("SHELL", "/bin/sh")]
    process = subprocess.Popen(args,
        stdin = subprocess.PIPE if sys.platform != "win32" else None,
        stdout = subprocess.PIPE if pipe_output else None,
        stderr = subprocess.STDOUT if pipe_output else None,
        bufsize = 1,
        close_fds = (sys.platform != "win32"),
        shell = (sys.platform == "win32"),
        env = make_environment(env),
        **kwargs)
    if sys.platform != "win32":
        process.stdin.write(cmdline)
        process.stdin.close()
    return process
def kill_shell_process(process, force=False):
    if sys.platform != "win32":
        signal = "-KILL" if force else "-TERM"
        rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
        if rc == 0:
            return
    if force:
        process.kill()
    else:
        process.terminate()
|
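The new loop keeps host-level Python settings from leaking into user subprocesses while still forcing the two variables the editor needs. A quick check of the behaviour (values illustrative; `shell` is the module from the row):

```python
import os
from shell import make_environment

os.environ["PYTHONPATH"] = "/opt/devo/lib"   # would otherwise leak through
env = make_environment()
assert "PYTHONPATH" not in env               # stripped by the new loop
assert env["PYTHONUNBUFFERED"] == "1"        # still forced on
```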
6b179dc4fb95f4db380b9156381b6210adeef2e5
|
conftest.py
|
conftest.py
|
import os
import mock
import pytest
PROJECT = os.environ['GCLOUD_PROJECT']
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
import os
import mock
import pytest
PROJECT = 'python-docs-samples'
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
Set the Project in code
|
Set the Project in code
|
Python
|
apache-2.0
|
GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python
|
import os
import mock
import pytest
- PROJECT = os.environ['GCLOUD_PROJECT']
+ PROJECT = 'python-docs-samples'
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
Set the Project in code
|
## Code Before:
import os
import mock
import pytest
PROJECT = os.environ['GCLOUD_PROJECT']
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
## Instruction:
Set the Project in code
## Code After:
import os
import mock
import pytest
PROJECT = 'python-docs-samples'
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
import os
import mock
import pytest
- PROJECT = os.environ['GCLOUD_PROJECT']
+ PROJECT = 'python-docs-samples'
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
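Any test that requests the fixture gets the URI rewrite for free; a minimal sketch (the `service` resource and endpoint are hypothetical):

```python
def test_list_instances(api_client_inject_project_id):
    # `service` would be a googleapiclient resource built elsewhere.
    request = service.instances().list(project='YOUR_PROJECT_ID',
                                       zone='us-central1-a')
    response = request.execute()  # URI now targets python-docs-samples
```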
573f3fd726c7bf1495bfdfeb2201317abc2949e4
|
src/parser/menu_item.py
|
src/parser/menu_item.py
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
Remove previously added method because finally not used...
|
Remove previously added method because finally not used...
|
Python
|
mit
|
epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
- def target_is_reference(self):
- # If it is not another possibility, it is a reference
- return not self.target_is_sitemap() and \
- not self.target_is_url() and \
- self.target is not None
-
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
Remove previously added method because it is ultimately not used
|
## Code Before:
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
            -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
## Instruction:
Remove previously added method because it is ultimately not used
## Code After:
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
            -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
            -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
- def target_is_reference(self):
- # If it is not another possibility, it is a reference
- return not self.target_is_sitemap() and \
- not self.target_is_url() and \
- self.target is not None
-
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
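
A quick sketch of how the three remaining target_is_* helpers classify a target (MenuItem as defined in the new code above; all values invented):

items = [
    MenuItem('Home', None, hidden=False),           # plain page entry
    MenuItem('EPFL', 'http://www.epfl.ch', False),  # hardcoded URL
    MenuItem('Map', 'sitemap', False),              # sitemap link
    MenuItem('PDF', '/files/guide.pdf', False),     # file link
]
for item in items:
    print(item.txt, item.target_is_url(),
          item.target_is_sitemap(), item.target_is_file())
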
2e9a6a2babb16f4ed9c3367b21ee28514d1988a8
|
srm/__main__.py
|
srm/__main__.py
|
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
if __name__ == '__main__':
cli()
|
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
cli(prog_name='srm')
|
Set correct program name in 'help' output
|
Set correct program name in 'help' output
|
Python
|
mit
|
cmcginty/simple-rom-manager,cmcginty/simple-rom-manager
|
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
+ cli(prog_name='srm')
- if __name__ == '__main__':
- cli()
-
|
Set correct program name in 'help' output
|
## Code Before:
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
if __name__ == '__main__':
cli()
## Instruction:
Set correct program name in 'help' output
## Code After:
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
cli(prog_name='srm')
|
import click
from . import __version__, status
@click.group()
@click.version_option(__version__)
def cli() -> None:
"""Main command-line entry method."""
cli.add_command(status.cli)
+ cli(prog_name='srm')
-
- if __name__ == '__main__':
- cli()
|
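
For context, a minimal sketch of what prog_name changes (group and command names invented here): click substitutes it for sys.argv[0] when rendering usage and help text.

import click

@click.group()
def cli() -> None:
    """Toy entry point, illustrative only."""

@cli.command()
def hello() -> None:
    click.echo('hello')

if __name__ == '__main__':
    # Without prog_name, `python -m srm --help` prints "Usage: __main__.py ...";
    # with it, the help reads "Usage: srm ...".
    cli(prog_name='srm')
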
3a8d7ff5f047c7b3476b8dcffa0e6850e952a645
|
docs/examples/http_proxy/set_http_proxy_method.py
|
docs/examples/http_proxy/set_http_proxy_method.py
|
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
|
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
|
Fix a typo in the example.
|
Fix a typo in the example.
|
Python
|
apache-2.0
|
kater169/libcloud,DimensionDataCBUSydney/libcloud,t-tran/libcloud,Scalr/libcloud,MrBasset/libcloud,watermelo/libcloud,curoverse/libcloud,Kami/libcloud,SecurityCompass/libcloud,Kami/libcloud,pantheon-systems/libcloud,andrewsomething/libcloud,schaubl/libcloud,pantheon-systems/libcloud,jimbobhickville/libcloud,munkiat/libcloud,iPlantCollaborativeOpenSource/libcloud,schaubl/libcloud,Kami/libcloud,JamesGuthrie/libcloud,sahildua2305/libcloud,jimbobhickville/libcloud,iPlantCollaborativeOpenSource/libcloud,aleGpereira/libcloud,mgogoulos/libcloud,SecurityCompass/libcloud,curoverse/libcloud,munkiat/libcloud,sfriesel/libcloud,mbrukman/libcloud,smaffulli/libcloud,mistio/libcloud,niteoweb/libcloud,briancurtin/libcloud,supertom/libcloud,sergiorua/libcloud,cryptickp/libcloud,watermelo/libcloud,vongazman/libcloud,sergiorua/libcloud,samuelchong/libcloud,sfriesel/libcloud,StackPointCloud/libcloud,JamesGuthrie/libcloud,thesquelched/libcloud,cloudControl/libcloud,lochiiconnectivity/libcloud,DimensionDataCBUSydney/libcloud,aviweit/libcloud,t-tran/libcloud,thesquelched/libcloud,jerryblakley/libcloud,techhat/libcloud,cryptickp/libcloud,MrBasset/libcloud,ZuluPro/libcloud,ByteInternet/libcloud,Verizon/libcloud,mbrukman/libcloud,wrigri/libcloud,jimbobhickville/libcloud,Verizon/libcloud,cloudControl/libcloud,wuyuewen/libcloud,iPlantCollaborativeOpenSource/libcloud,mbrukman/libcloud,sahildua2305/libcloud,niteoweb/libcloud,kater169/libcloud,lochiiconnectivity/libcloud,atsaki/libcloud,curoverse/libcloud,smaffulli/libcloud,apache/libcloud,erjohnso/libcloud,mistio/libcloud,apache/libcloud,marcinzaremba/libcloud,ZuluPro/libcloud,ByteInternet/libcloud,mathspace/libcloud,dcorbacho/libcloud,marcinzaremba/libcloud,wido/libcloud,pantheon-systems/libcloud,Itxaka/libcloud,dcorbacho/libcloud,mtekel/libcloud,schaubl/libcloud,munkiat/libcloud,Itxaka/libcloud,mathspace/libcloud,Scalr/libcloud,DimensionDataCBUSydney/libcloud,jerryblakley/libcloud,Cloud-Elasticity-Services/as-libcloud,andrewsomething/libcloud,Itxaka/libcloud,NexusIS/libcloud,atsaki/libcloud,Cloud-Elasticity-Services/as-libcloud,jerryblakley/libcloud,techhat/libcloud,aleGpereira/libcloud,techhat/libcloud,samuelchong/libcloud,supertom/libcloud,cloudControl/libcloud,niteoweb/libcloud,sfriesel/libcloud,mgogoulos/libcloud,t-tran/libcloud,Verizon/libcloud,marcinzaremba/libcloud,carletes/libcloud,wuyuewen/libcloud,samuelchong/libcloud,cryptickp/libcloud,mathspace/libcloud,thesquelched/libcloud,mtekel/libcloud,wrigri/libcloud,sergiorua/libcloud,vongazman/libcloud,carletes/libcloud,smaffulli/libcloud,vongazman/libcloud,mistio/libcloud,sahildua2305/libcloud,wuyuewen/libcloud,dcorbacho/libcloud,illfelder/libcloud,lochiiconnectivity/libcloud,atsaki/libcloud,aviweit/libcloud,pquentin/libcloud,carletes/libcloud,ZuluPro/libcloud,wido/libcloud,ByteInternet/libcloud,briancurtin/libcloud,pquentin/libcloud,mgogoulos/libcloud,illfelder/libcloud,apache/libcloud,aviweit/libcloud,erjohnso/libcloud,andrewsomething/libcloud,Scalr/libcloud,aleGpereira/libcloud,MrBasset/libcloud,mtekel/libcloud,pquentin/libcloud,watermelo/libcloud,supertom/libcloud,StackPointCloud/libcloud,Cloud-Elasticity-Services/as-libcloud,NexusIS/libcloud,briancurtin/libcloud,erjohnso/libcloud,JamesGuthrie/libcloud,SecurityCompass/libcloud,wrigri/libcloud,StackPointCloud/libcloud,kater169/libcloud,NexusIS/libcloud,wido/libcloud,illfelder/libcloud
|
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
- driver.set_http_proxy(proxy_url=PROXY_URL)
+ driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
|
Fix a typo in the example.
|
## Code Before:
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
## Instruction:
Fix a typo in the example.
## Code After:
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
driver.connection.set_http_proxy(proxy_url=PROXY_URL)
pprint(driver.list_nodes())
|
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL = 'http://<proxy hostname>:<proxy port>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord')
- driver.set_http_proxy(proxy_url=PROXY_URL)
+ driver.connection.set_http_proxy(proxy_url=PROXY_URL)
? +++++++++++
pprint(driver.list_nodes())
|
e78b3f53150a5f1c170b860f8719e982cf1c6f9e
|
integration/main.py
|
integration/main.py
|
import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
|
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
Fix state bleed, add fixture import test
|
Fix state bleed, add fixture import test
|
Python
|
apache-2.0
|
section-io/tessera,urbanairship/tessera,tessera-metrics/tessera,jmptrader/tessera,section-io/tessera,urbanairship/tessera,aalpern/tessera,section-io/tessera,jmptrader/tessera,filippog/tessera,aalpern/tessera,aalpern/tessera,Slach/tessera,Slach/tessera,urbanairship/tessera,urbanairship/tessera,Slach/tessera,jmptrader/tessera,jmptrader/tessera,tessera-metrics/tessera,aalpern/tessera,section-io/tessera,urbanairship/tessera,tessera-metrics/tessera,jmptrader/tessera,tessera-metrics/tessera,Slach/tessera,filippog/tessera,filippog/tessera,aalpern/tessera,tessera-metrics/tessera
|
import os
import sys
- from spec import Spec, skip
+ from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
+ from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
+ # Ensure no cached session crap
+ db.session.close_all()
+
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
- assert len(Dashboard.query.all()) == 0
+ eq_(len(Dashboard.query.all()), 0)
+ def can_import_fixtures(self):
+ from tessera.application import db
+ from tessera.importer.json import JsonImporter
+ from tessera.model.database import Dashboard
+ db.create_all()
+ path = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
+ ))
+ JsonImporter.import_file(path)
+ eq_(len(Dashboard.query.all()), 1)
+
|
Fix state bleed, add fixture import test
|
## Code Before:
import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
## Instruction:
Fix state bleed, add fixture import test
## Code After:
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
import os
import sys
- from spec import Spec, skip
+ from spec import Spec, skip, eq_
? +++++
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
+ from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
+ # Ensure no cached session crap
+ db.session.close_all()
+
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
- assert len(Dashboard.query.all()) == 0
? --- ^^^ ^^^
+ eq_(len(Dashboard.query.all()), 0)
? ^^^ ^ +
+
+ def can_import_fixtures(self):
+ from tessera.application import db
+ from tessera.importer.json import JsonImporter
+ from tessera.model.database import Dashboard
+ db.create_all()
+ path = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
+ ))
+ JsonImporter.import_file(path)
+ eq_(len(Dashboard.query.all()), 1)
|
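
A framework-free sketch of the "state bleed" being fixed: module-level state survives from one test to the next unless teardown resets it. The clear() below plays the role db.session.close_all() plays above.

_cache = {}  # stands in for the cached DB session state

def get(key):
    return _cache.setdefault(key, object())

def test_one():
    get('db')              # first test populates the cache

def test_two():
    assert 'db' in _cache  # passes only because test_one leaked state

def teardown():
    _cache.clear()         # reset shared state between tests
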
0dac29f30853498f6e9d82c8b791ced5ec21667c
|
models/00_settings.py
|
models/00_settings.py
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if os.environ['POSTGRES_ENABLED'] == 'True':
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if app_config.has_key('postgres'):
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
Check configuration file rather than env variable
|
Check configuration file rather than env variable
|
Python
|
apache-2.0
|
wefner/w2pfooty,wefner/w2pfooty,wefner/w2pfooty
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
- if os.environ['POSTGRES_ENABLED'] == 'True':
+ if app_config.has_key('postgres'):
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
Check configuration file rather than env variable
|
## Code Before:
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if os.environ['POSTGRES_ENABLED'] == 'True':
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
## Instruction:
Check configuration file rather than env variable
## Code After:
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
if app_config.has_key('postgres'):
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
import os
import logging
import json
from logging.config import dictConfig
from gluon.storage import Storage
from gluon.contrib.appconfig import AppConfig
# app_config is used to cache values in production
app_config = AppConfig(reload=True)
# settings is used to avoid cached values in production
settings = Storage()
# LOGGING CONFIGURATIONS
settings.logging_config = dict(main=os.path.join(request.folder,
'logging.json'),
scheduler=os.path.join(request.folder,
'logging-scheduler.json'))
# INITIALIZE LOGGING
if os.path.exists(settings.logging_config['main']):
try:
config = json.loads(open(settings.logging_config['main']).read())
logging.config.dictConfig(config)
except ValueError as e:
pass
logger = logging.getLogger(settings.app_name)
# DATABASE CONFIGURATION
# Check whether POSTGRES_ENABLED env var is set to True or not.
# If so, generate connection string.
- if os.environ['POSTGRES_ENABLED'] == 'True':
+ if app_config.has_key('postgres'):
settings.db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
u=app_config.get('postgres.username'),
p=app_config.get('postgres.password'),
h=app_config.get('postgres.hostname'),
po=app_config.get('postgres.port'),
db=app_config.get('postgres.database'))
else:
settings.db_uri = app_config.get('db.uri')
|
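
The URI construction reduced to a standalone sketch, with a plain dict standing in for web2py's AppConfig (the keys mirror the postgres.* entries the new check expects in the config file; all values invented):

cfg = {
    'postgres.username': 'w2p',
    'postgres.password': 'secret',
    'postgres.hostname': 'localhost',
    'postgres.port': 5432,
    'postgres.database': 'footy',
}
db_uri = 'postgres://{u}:{p}@{h}:{po}/{db}'.format(
    u=cfg['postgres.username'],
    p=cfg['postgres.password'],
    h=cfg['postgres.hostname'],
    po=cfg['postgres.port'],
    db=cfg['postgres.database'])
print(db_uri)  # postgres://w2p:secret@localhost:5432/footy
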
247c4dcaf3e1c1f9c069ab8a2fc06cfcd75f8ea9
|
UM/Util.py
|
UM/Util.py
|
def parseBool(value):
return value in [True, "True", "true", 1]
|
def parseBool(value):
return value in [True, "True", "true", "Yes", "yes", 1]
|
Add "Yes" as an option for parsing bools
|
Add "Yes" as an option for parsing bools
CURA-2204
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
def parseBool(value):
- return value in [True, "True", "true", 1]
+ return value in [True, "True", "true", "Yes", "yes", 1]
|
Add "Yes" as an option for parsing bools
|
## Code Before:
def parseBool(value):
return value in [True, "True", "true", 1]
## Instruction:
Add "Yes" as an option for parsing bools
## Code After:
def parseBool(value):
return value in [True, "True", "true", "Yes", "yes", 1]
|
def parseBool(value):
- return value in [True, "True", "true", 1]
+ return value in [True, "True", "true", "Yes", "yes", 1]
? ++++++++++++++
|
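
The updated helper's behaviour spelled out; membership testing is exact, so strings such as "1" or "on" still parse as False:

def parseBool(value):
    return value in [True, "True", "true", "Yes", "yes", 1]

assert parseBool("Yes") is True
assert parseBool("yes") is True
assert parseBool(1) is True
assert parseBool("no") is False
assert parseBool("1") is False  # only the integer 1 matches
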
727702d6d5cf8d43ac9c4f8011ff2b6d78cfbe4c
|
account_constraints/model/account_move.py
|
account_constraints/model/account_move.py
|
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
|
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
Use constraint decorator on account_constraints
|
[IMP] Use constraint decorator on account_constraints
|
Python
|
agpl-3.0
|
pedrobaeza/account-financial-tools,VitalPet/account-financial-tools,amoya-dx/account-financial-tools,VitalPet/account-financial-tools,credativUK/account-financial-tools,lepistone/account-financial-tools,damdam-s/account-financial-tools,acsone/account-financial-tools,Pexego/account-financial-tools,raycarnes/account-financial-tools,cysnake4713/account-financial-tools,charbeljc/account-financial-tools,raycarnes/account-financial-tools,taktik/account-financial-tools,OpenPymeMx/account-financial-tools,akretion/account-financial-tools,pedrobaeza/account-financial-tools,adhoc-dev/oca-account-financial-tools,dvitme/account-financial-tools,adhoc-dev/oca-account-financial-tools,luc-demeyer/account-financial-tools,ClearCorp-dev/account-financial-tools,Endika/account-financial-tools,nagyv/account-financial-tools,andrius-preimantas/account-financial-tools,VitalPet/account-financial-tools,Nowheresly/account-financial-tools,andhit-r/account-financial-tools,factorlibre/account-financial-tools,abstract-open-solutions/account-financial-tools,iDTLabssl/account-financial-tools,credativUK/account-financial-tools,Antiun/account-financial-tools,OpenPymeMx/account-financial-tools,yelizariev/account-financial-tools,alhashash/account-financial-tools,diagramsoftware/account-financial-tools,abstract-open-solutions/account-financial-tools,syci/account-financial-tools,Endika/account-financial-tools,Pexego/account-financial-tools,charbeljc/account-financial-tools,open-synergy/account-financial-tools,damdam-s/account-financial-tools,amoya-dx/account-financial-tools,bringsvor/account-financial-tools,Domatix/account-financial-tools,xpansa/account-financial-tools,bringsvor/account-financial-tools,open-synergy/account-financial-tools,nagyv/account-financial-tools,OpenPymeMx/account-financial-tools,acsone/account-financial-tools,syci/account-financial-tools,yelizariev/account-financial-tools,lepistone/account-financial-tools,andrius-preimantas/account-financial-tools,Nowheresly/account-financial-tools,Domatix/account-financial-tools,andhit-r/account-financial-tools,akretion/account-financial-tools,open-synergy/account-financial-tools,DarkoNikolovski/account-financial-tools,cysnake4713/account-financial-tools,dvitme/account-financial-tools,Antiun/account-financial-tools,factorlibre/account-financial-tools,luc-demeyer/account-financial-tools,iDTLabssl/account-financial-tools,acsone/account-financial-tools,DarkoNikolovski/account-financial-tools,Domatix/account-financial-tools,taktik/account-financial-tools,diagramsoftware/account-financial-tools,xpansa/account-financial-tools,alhashash/account-financial-tools,ClearCorp-dev/account-financial-tools
|
- from openerp import models, api
+ from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
- @api.multi
+ @api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
- return False
+ raise exceptions.Warning(
+ _('You cannot create entries with date not in the '
+ 'fiscal year of the chosen period'))
return True
- _constraints = [
- (_check_fiscal_year,
- 'You cannot create entries with date not in the '
- 'fiscal year of the chosen period',
- ['line_id']),
- ]
-
|
Use constraint decorator on account_constraints
|
## Code Before:
from openerp import models, api
class AccountMove(models.Model):
_inherit = "account.move"
@api.multi
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
return False
return True
_constraints = [
(_check_fiscal_year,
'You cannot create entries with date not in the '
'fiscal year of the chosen period',
['line_id']),
]
## Instruction:
Use constraint decorator on account_constraints
## Code After:
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
_inherit = "account.move"
@api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
raise exceptions.Warning(
_('You cannot create entries with date not in the '
'fiscal year of the chosen period'))
return True
|
- from openerp import models, api
+ from openerp import models, api, exceptions, _
? +++++++++++++++
class AccountMove(models.Model):
_inherit = "account.move"
- @api.multi
+ @api.constrains('journal_id', 'period_id', 'date')
def _check_fiscal_year(self):
for move in self:
if move.journal_id.allow_date_fy:
date_start = move.period_id.fiscalyear_id.date_start
date_stop = move.period_id.fiscalyear_id.date_stop
if not date_start <= move.date <= date_stop:
- return False
+ raise exceptions.Warning(
+ _('You cannot create entries with date not in the '
+ 'fiscal year of the chosen period'))
return True
-
- _constraints = [
- (_check_fiscal_year,
- 'You cannot create entries with date not in the '
- 'fiscal year of the chosen period',
- ['line_id']),
- ]
|
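
For reference, the general shape of an Odoo 8-style @api.constrains check, with invented model and field names. The decorator runs on create/write whenever a listed field changes, and raising aborts the transaction with the given message.

from openerp import models, fields, api, exceptions, _

class Example(models.Model):
    _name = 'example.model'

    start = fields.Date()
    end = fields.Date()

    @api.constrains('start', 'end')
    def _check_dates(self):
        for record in self:
            if record.start and record.end and record.start > record.end:
                raise exceptions.Warning(
                    _('Start date must not be after end date'))
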
46a0caa1bc162d11b26a996379170b2fc49f2940
|
mcbench/client.py
|
mcbench/client.py
|
import collections
import redis
BENCHMARK_FIELDS = [
'author', 'author_url', 'date_submitted', 'date_updated',
'name', 'summary', 'tags', 'title', 'url'
]
Benchmark = collections.namedtuple('Benchmark', ' '.join(BENCHMARK_FIELDS))
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
return Benchmark(**self.redis.hgetall('benchmark:%s' % benchmark_id))
def get_benchmark_by_name(self, name):
benchmark_id = self.redis.get('benchmark:%s:id', name)
return self.get_benchmark_by_id(benchmark_id)
def insert_benchmark(self, benchmark):
benchmark_id = self.redis.incr('global:next_benchmark_id')
self.redis.set('benchmark:%s:id' % benchmark.name, benchmark_id)
self.redis.hmset('benchmark:%s' % benchmark_id, benchmark._asdict())
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
|
import redis
class Benchmark(object):
def __init__(self, author, author_url, date_submitted, date_updated,
name, summary, tags, title, url):
self.author = author
self.author_url = author_url
self.date_submitted = date_submitted
self.date_updated = date_updated
self.name = name
self.summary = summary
self.tags = tags
self.title = title
self.url = url
def __repr__(self):
return '<Benchmark: %s>' % self.name
class BenchmarkDoesNotExist(Exception):
pass
class BenchmarkAlreadyExists(Exception):
pass
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
data = self.redis.hgetall('benchmark:%s' % benchmark_id)
if not data:
raise BenchmarkDoesNotExist
return Benchmark(**data)
def get_benchmark_by_name(self, name):
benchmark_id = self.redis.get('name:%s:id' % name)
if benchmark_id is None:
raise BenchmarkDoesNotExist
return self.get_benchmark_by_id(benchmark_id)
def get_all_benchmarks(self):
return [self.get_benchmark_by_id(key[len('benchmark:'):])
for key in self.redis.keys('benchmark:*')]
def insert_benchmark(self, benchmark):
benchmark_id = self.redis.get('name:%s:id' % benchmark.name)
if benchmark_id is not None:
raise BenchmarkAlreadyExists
benchmark_id = self.redis.incr('global:next_benchmark_id')
self.redis.set('name:%s:id' % benchmark.name, benchmark_id)
self.redis.hmset('benchmark:%s' % benchmark_id, vars(benchmark))
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
|
Make Benchmark a class, not a namedtuple.
|
Make Benchmark a class, not a namedtuple.
|
Python
|
mit
|
isbadawi/mcbench,isbadawi/mcbench
|
- import collections
-
import redis
- BENCHMARK_FIELDS = [
- 'author', 'author_url', 'date_submitted', 'date_updated',
- 'name', 'summary', 'tags', 'title', 'url'
- ]
- Benchmark = collections.namedtuple('Benchmark', ' '.join(BENCHMARK_FIELDS))
+ class Benchmark(object):
+ def __init__(self, author, author_url, date_submitted, date_updated,
+ name, summary, tags, title, url):
+ self.author = author
+ self.author_url = author_url
+ self.date_submitted = date_submitted
+ self.date_updated = date_updated
+ self.name = name
+ self.summary = summary
+ self.tags = tags
+ self.title = title
+ self.url = url
+
+ def __repr__(self):
+ return '<Benchmark: %s>' % self.name
+
+
+ class BenchmarkDoesNotExist(Exception):
+ pass
+
+
+ class BenchmarkAlreadyExists(Exception):
+ pass
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
- return Benchmark(**self.redis.hgetall('benchmark:%s' % benchmark_id))
+ data = self.redis.hgetall('benchmark:%s' % benchmark_id)
+ if not data:
+ raise BenchmarkDoesNotExist
+ return Benchmark(**data)
def get_benchmark_by_name(self, name):
- benchmark_id = self.redis.get('benchmark:%s:id', name)
+ benchmark_id = self.redis.get('name:%s:id' % name)
+ if benchmark_id is None:
+ raise BenchmarkDoesNotExist
return self.get_benchmark_by_id(benchmark_id)
+ def get_all_benchmarks(self):
+ return [self.get_benchmark_by_id(key[len('benchmark:'):])
+ for key in self.redis.keys('benchmark:*')]
+
def insert_benchmark(self, benchmark):
+ benchmark_id = self.redis.get('name:%s:id' % benchmark.name)
+ if benchmark_id is not None:
+ raise BenchmarkAlreadyExists
benchmark_id = self.redis.incr('global:next_benchmark_id')
- self.redis.set('benchmark:%s:id' % benchmark.name, benchmark_id)
+ self.redis.set('name:%s:id' % benchmark.name, benchmark_id)
- self.redis.hmset('benchmark:%s' % benchmark_id, benchmark._asdict())
+ self.redis.hmset('benchmark:%s' % benchmark_id, vars(benchmark))
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
|
Make Benchmark a class, not a namedtuple.
|
## Code Before:
import collections
import redis
BENCHMARK_FIELDS = [
'author', 'author_url', 'date_submitted', 'date_updated',
'name', 'summary', 'tags', 'title', 'url'
]
Benchmark = collections.namedtuple('Benchmark', ' '.join(BENCHMARK_FIELDS))
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
return Benchmark(**self.redis.hgetall('benchmark:%s' % benchmark_id))
def get_benchmark_by_name(self, name):
benchmark_id = self.redis.get('benchmark:%s:id', name)
return self.get_benchmark_by_id(benchmark_id)
def insert_benchmark(self, benchmark):
benchmark_id = self.redis.incr('global:next_benchmark_id')
self.redis.set('benchmark:%s:id' % benchmark.name, benchmark_id)
self.redis.hmset('benchmark:%s' % benchmark_id, benchmark._asdict())
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
## Instruction:
Make Benchmark a class, not a namedtuple.
## Code After:
import redis
class Benchmark(object):
def __init__(self, author, author_url, date_submitted, date_updated,
name, summary, tags, title, url):
self.author = author
self.author_url = author_url
self.date_submitted = date_submitted
self.date_updated = date_updated
self.name = name
self.summary = summary
self.tags = tags
self.title = title
self.url = url
def __repr__(self):
return '<Benchmark: %s>' % self.name
class BenchmarkDoesNotExist(Exception):
pass
class BenchmarkAlreadyExists(Exception):
pass
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
data = self.redis.hgetall('benchmark:%s' % benchmark_id)
if not data:
raise BenchmarkDoesNotExist
return Benchmark(**data)
def get_benchmark_by_name(self, name):
benchmark_id = self.redis.get('name:%s:id' % name)
if benchmark_id is None:
raise BenchmarkDoesNotExist
return self.get_benchmark_by_id(benchmark_id)
def get_all_benchmarks(self):
return [self.get_benchmark_by_id(key[len('benchmark:'):])
for key in self.redis.keys('benchmark:*')]
def insert_benchmark(self, benchmark):
benchmark_id = self.redis.get('name:%s:id' % benchmark.name)
if benchmark_id is not None:
raise BenchmarkAlreadyExists
benchmark_id = self.redis.incr('global:next_benchmark_id')
self.redis.set('name:%s:id' % benchmark.name, benchmark_id)
self.redis.hmset('benchmark:%s' % benchmark_id, vars(benchmark))
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
|
- import collections
-
import redis
- BENCHMARK_FIELDS = [
- 'author', 'author_url', 'date_submitted', 'date_updated',
- 'name', 'summary', 'tags', 'title', 'url'
- ]
- Benchmark = collections.namedtuple('Benchmark', ' '.join(BENCHMARK_FIELDS))
+ class Benchmark(object):
+ def __init__(self, author, author_url, date_submitted, date_updated,
+ name, summary, tags, title, url):
+ self.author = author
+ self.author_url = author_url
+ self.date_submitted = date_submitted
+ self.date_updated = date_updated
+ self.name = name
+ self.summary = summary
+ self.tags = tags
+ self.title = title
+ self.url = url
+
+ def __repr__(self):
+ return '<Benchmark: %s>' % self.name
+
+
+ class BenchmarkDoesNotExist(Exception):
+ pass
+
+
+ class BenchmarkAlreadyExists(Exception):
+ pass
class McBenchClient(object):
def __init__(self, redis):
self.redis = redis
def get_benchmark_by_id(self, benchmark_id):
- return Benchmark(**self.redis.hgetall('benchmark:%s' % benchmark_id))
? ^^ ^^^ ^^^^^^^^^^^^ -
+ data = self.redis.hgetall('benchmark:%s' % benchmark_id)
? ^^ ^ ^^
+ if not data:
+ raise BenchmarkDoesNotExist
+ return Benchmark(**data)
def get_benchmark_by_name(self, name):
- benchmark_id = self.redis.get('benchmark:%s:id', name)
? ^ ------- ^
+ benchmark_id = self.redis.get('name:%s:id' % name)
? ^^^ ^^
+ if benchmark_id is None:
+ raise BenchmarkDoesNotExist
return self.get_benchmark_by_id(benchmark_id)
+ def get_all_benchmarks(self):
+ return [self.get_benchmark_by_id(key[len('benchmark:'):])
+ for key in self.redis.keys('benchmark:*')]
+
def insert_benchmark(self, benchmark):
+ benchmark_id = self.redis.get('name:%s:id' % benchmark.name)
+ if benchmark_id is not None:
+ raise BenchmarkAlreadyExists
benchmark_id = self.redis.incr('global:next_benchmark_id')
- self.redis.set('benchmark:%s:id' % benchmark.name, benchmark_id)
? ^ -------
+ self.redis.set('name:%s:id' % benchmark.name, benchmark_id)
? ^^^
- self.redis.hmset('benchmark:%s' % benchmark_id, benchmark._asdict())
? ---------
+ self.redis.hmset('benchmark:%s' % benchmark_id, vars(benchmark))
? +++++
def from_redis_url(redis_url):
return McBenchClient(redis.from_url(redis_url))
|
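
A hypothetical round trip with the client above; the Redis URL and benchmark values are made up (this is py2-era code — with redis-py on Python 3 you would also pass decode_responses=True so hgetall returns strings).

client = from_redis_url('redis://localhost:6379/0')
bench = Benchmark(
    author='alice', author_url='http://example.com/alice',
    date_submitted='2013-01-01', date_updated='2013-01-02',
    name='mc-sort', summary='sorting benchmark', tags='sorting',
    title='MC Sort', url='http://example.com/mc-sort')
client.insert_benchmark(bench)
print(client.get_benchmark_by_name('mc-sort'))  # <Benchmark: mc-sort>
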
6d57372c270d980e0f7d662a60195e54f88b9be5
|
web/gunicorn.conf.py
|
web/gunicorn.conf.py
|
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
|
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
accesslog = '-'
errorlog = '-'
|
Make gunicorn log to stdout
|
Make gunicorn log to stdout
|
Python
|
mit
|
slava-sh/messenger,slava-sh/messenger,slava-sh/messenger,slava-sh/messenger
|
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
+ accesslog = '-'
+ errorlog = '-'
|
Make gunicorn log to stdout
|
## Code Before:
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
## Instruction:
Make gunicorn log to stdout
## Code After:
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
accesslog = '-'
errorlog = '-'
|
import os
import multiprocessing
proc_name = 'gunicorn: {}'.format(os.environ['WEB_HOSTNAME'])
user = 'www-data'
group = 'www-data'
bind = '0.0.0.0:80'
workers = multiprocessing.cpu_count() * 2 + 1
threads = workers
+ accesslog = '-'
+ errorlog = '-'
|
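
Two notes on the config: '-' directs gunicorn's access log to stdout and its error log to stderr, and the worker count follows the usual (2 x cores) + 1 sizing rule, e.g.:

import multiprocessing

cores = multiprocessing.cpu_count()
print(cores * 2 + 1)  # on a 4-core host: 9 workers (threads mirrors it)
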
2ee34d2d74a8fb41dfe49cd3933d0d7abb25fee4
|
rsvp/admin.py
|
rsvp/admin.py
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
list_display = ['last_name', 'first_name']
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
list_display = ['last_name', 'first_name', 'attending', ]
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
Add attending as column to Guest
|
Add attending as column to Guest
|
Python
|
mit
|
gboone/wedding.harmsboone.org,gboone/wedding.harmsboone.org
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
- list_display = ['last_name', 'first_name']
+ list_display = ['last_name', 'first_name', 'attending', ]
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
Add attending as column to Guest
|
## Code Before:
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
list_display = ['last_name', 'first_name']
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
## Instruction:
Add attending as column to Guest
## Code After:
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
list_display = ['last_name', 'first_name', 'attending', ]
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song
class AdminModel(admin.ModelAdmin):
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
- list_display = ['last_name', 'first_name']
+ list_display = ['last_name', 'first_name', 'attending', ]
? +++++++++++++++
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
class LocationAdmin(AdminModel):
pass
class TableAdmin(AdminModel):
pass
class EventAdmin(AdminModel):
pass
class HotelAdmin(AdminModel):
pass
class PartyAdmin(admin.ModelAdmin):
filter_horizontal = ('guests',)
list_display = ['name', 'responded']
class SongAdmin(admin.ModelAdmin):
list_display = ['title', 'artist', 'votes']
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
4324257e5fe1c49281e4844b07d222b68bd45287
|
avalon/fusion/lib.py
|
avalon/fusion/lib.py
|
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
Fix doctest - not sure why it was failing on the quotation marks
|
Fix doctest - not sure why it was failing on the quotation marks
|
Python
|
mit
|
getavalon/core,mindbender-studio/core,mindbender-studio/core,getavalon/core
|
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
- ("C:/test", 4, ".exr")
+ ('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
- ("filename.", 2, ".tif")
+ ('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
- ("foobar", 2, ".tif")
+ ('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
Fix doctest - not sure why it was failing on the quotation marks
|
## Code Before:
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
## Instruction:
Fix doctest - not sure why it was failing on the quotation marks
## Code After:
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
- ("C:/test", 4, ".exr")
? ^ ^ ^ ^
+ ('C:/test', 4, '.exr')
? ^ ^ ^ ^
>>> get_frame_path("filename.00.tif")
- ("filename.", 2, ".tif")
? ^ ^ ^ ^
+ ('filename.', 2, '.tif')
? ^ ^ ^ ^
>>> get_frame_path("foobar35.tif")
- ("foobar", 2, ".tif")
? ^ ^ ^ ^
+ ('foobar', 2, '.tif')
? ^ ^ ^ ^
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
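A quick hedged illustration of the record above, showing how the (head, padding, ext) tuple from get_frame_path is typically recombined into a Fusion-style '#'-padded path; the reassembly line is an assumption for illustration, not code from the repo:

head, padding, ext = ('C:/test', 4, '.exr')            # == get_frame_path("C:/test.exr")
padded = '{0}{1}{2}'.format(head, '#' * padding, ext)  # rebuild the padded pattern
print(padded)  # C:/test####.exr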
80ca0bebce22f64d0d01377493126ed95d8a64cb
|
falcom/luhn.py
|
falcom/luhn.py
|
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
Reorder methods to make sense
|
Reorder methods to make sense
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
-
- def get_check_digit_from_checkable_int (number):
- return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
+ def get_check_digit_from_checkable_int (number):
+ return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
+
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
Reorder methods to make sense
|
## Code Before:
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
## Instruction:
Reorder methods to make sense
## Code After:
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
-
- def get_check_digit_from_checkable_int (number):
- return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
+ def get_check_digit_from_checkable_int (number):
+ return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
+
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
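A hedged usage sketch for the Luhn helpers in the record above (the two functions are restated from the record; the caveat is an observation, not from the source): the tens-and-above part of the input is summed without per-digit doubling, so the result agrees with the full Luhn algorithm only for inputs of at most two digits.

def rotate_digit(digit):
    # double the digit, folding two-digit results back to one digit (16 -> 7)
    return (digit * 2) - 9 if digit > 4 else digit * 2

def get_check_digit_from_checkable_int(number):
    return (9 * ((number // 10) + rotate_digit(number % 10))) % 10

print(get_check_digit_from_checkable_int(18))  # 2, so '182' passes a Luhn check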
6ad4796030aab2f6dbf8389b4030007d0fcf8761
|
panoptes/test/mount/test_ioptron.py
|
panoptes/test/mount/test_ioptron.py
|
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_no_commands(self):
""" """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands=dict())
|
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_bad_commands(self):
""" Passes in a default config but blank commands, which should error """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
def test_config_auto_commands(self):
""" Passes in config like above, but no commands, so they should read from defaults """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
|
Update to test for mount setup
|
Update to test for mount setup
|
Python
|
mit
|
Guokr1991/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,Guokr1991/POCS,Guokr1991/POCS,Guokr1991/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,joshwalawender/POCS,AstroHuntsman/POCS,fmin2958/POCS,joshwalawender/POCS,fmin2958/POCS,fmin2958/POCS
|
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
+ def test_config_bad_commands(self):
+ """ Passes in a default config but blank commands, which should error """
+ mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
+
- def test_config_no_commands(self):
+ def test_config_auto_commands(self):
- """ """
+ """ Passes in config like above, but no commands, so they should read from defaults """
- mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands=dict())
+ mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
|
Update to test for mount setup
|
## Code Before:
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_no_commands(self):
""" """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands=dict())
## Instruction:
Update to test for mount setup
## Code After:
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_bad_commands(self):
""" Passes in a default config but blank commands, which should error """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
def test_config_auto_commands(self):
""" Passes in config like above, but no commands, so they should read from defaults """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
|
from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
+ def test_config_bad_commands(self):
+ """ Passes in a default config but blank commands, which should error """
+ mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
+
- def test_config_no_commands(self):
? ^
+ def test_config_auto_commands(self):
? ^^^
- """ """
+ """ Passes in config like above, but no commands, so they should read from defaults """
- mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands=dict())
? ---------------- -
+ mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
|
fa55a1a93dd53023159c4a21963361d9678e52cf
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
Remove reference to nonexistent file.
|
Remove reference to nonexistent file.
|
Python
|
isc
|
hobarrera/envsettings,hobarrera/envsettings
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
- package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
Remove reference to nonexistent file.
|
## Code Before:
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
## Instruction:
Remove reference to nonexistent file.
## Code After:
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
packages=['envsettings'],
- package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
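For reference on the record above: package_data maps package names to lists of data-file patterns bundled into the package, and the removed entry pointed at a file that did not exist. A minimal hedged sketch of the correct form, using a hypothetical bundled file:

from distutils.core import setup

setup(
    name='example',
    packages=['envsettings'],
    package_data={'envsettings': ['logging.json']},  # hypothetical bundled file
)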
1e73195e33c384605072e36ac1551bd6d67ba7cb
|
QGL/BasicSequences/__init__.py
|
QGL/BasicSequences/__init__.py
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
Add a helper function to create calibration sequences.
|
Add a helper function to create calibration sequences.
|
Python
|
apache-2.0
|
Plourde-Research-Lab/PyQLab,calebjordan/PyQLab,BBN-Q/PyQLab,rmcgurrin/PyQLab
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
+
+
+
+
+ from itertools import product
+ import operator
+ from ..PulsePrimitives import Id, X
+
+ def create_cal_seqs(qubits, numCals):
+ """
+ Helper function to create a set of calibration sequences.
+ """
+ calSet = [Id, X]
+ calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
+ return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
+
|
Add a helper function to create calibration sequences.
|
## Code Before:
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
## Instruction:
Add a helper function to create calibration sequences.
## Code After:
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
+
+
+
+
+ from itertools import product
+ import operator
+ from ..PulsePrimitives import Id, X
+
+ def create_cal_seqs(qubits, numCals):
+ """
+ Helper function to create a set of calibration sequences.
+ """
+ calSet = [Id, X]
+ calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
+ return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
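To make the combinatorics of create_cal_seqs above concrete, a hedged standalone sketch with string stand-ins for the Id/X pulse primitives (the record multiplies Pulse objects and uses Python 2's bare reduce; this only shows the product structure):

from itertools import product

calSet = ['Id', 'X']
qubits = ['q1', 'q2']
combos = list(product(calSet, repeat=len(qubits)))
print(combos)  # [('Id', 'Id'), ('Id', 'X'), ('X', 'Id'), ('X', 'X')]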
54c4e434276b242de56529e63bb6c5c61d891412
|
indico/modules/events/surveys/tasks.py
|
indico/modules/events/surveys/tasks.py
|
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
Survey.notifications_enabled)
try:
for survey in opened_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
try:
for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
Use safer condition for survey start notification
|
Use safer condition for survey start notification
|
Python
|
mit
|
mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,ThiefMaster/indico,indico/indico,mic4ael/indico,indico/indico,OmeGak/indico,OmeGak/indico,pferreir/indico,DirkHoffmann/indico,mic4ael/indico,DirkHoffmann/indico,OmeGak/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,pferreir/indico,ThiefMaster/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,OmeGak/indico
|
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
- opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
+ active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
- Survey.notifications_enabled)
try:
- for survey in opened_surveys:
+ for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
Use safer condition for survey start notification
|
## Code Before:
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
Survey.notifications_enabled)
try:
for survey in opened_surveys:
survey.send_start_notification()
finally:
db.session.commit()
## Instruction:
Use safer condition for survey start notification
## Code After:
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
try:
for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
- opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
? ^^ --- - ^ ----- ^ ^^ ^^ ^
+ active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
? ^^^^^ ^^^^^ ^^^^^^^ ++++ ^^ ^^ ^
- Survey.notifications_enabled)
try:
- for survey in opened_surveys:
? ^^ ---
+ for survey in active_surveys:
? ^^^^^
survey.send_start_notification()
finally:
db.session.commit()
|
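One scheduling detail worth spelling out from the record above: crontab(minute='*/30') fires at minutes 0 and 30 of every hour. A hedged standalone sketch in the modern Celery beat spelling (app and task names are placeholders, not from the repo):

from celery import Celery
from celery.schedules import crontab

app = Celery('sketch')

@app.task(name='sketch.example_task')
def example_task():
    pass  # placeholder body

# Beat would invoke the task at minutes {0, 30} of every hour.
app.conf.beat_schedule = {
    'example': {'task': 'sketch.example_task', 'schedule': crontab(minute='*/30')},
}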
d3caf80485da78c8eb050ff4d9e33a2ee6c8feda
|
tests/rietveld/test_event_handler.py
|
tests/rietveld/test_event_handler.py
|
from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode_exception(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
|
from __future__ import absolute_import, print_function
import pytest
from addie.rietveld import event_handler
@pytest.fixture
def rietveld_event_handler(qtbot):
return event_handler
def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler):
"""Test we can extract a bank id from bank workspace name"""
with pytest.raises(NotImplementedError) as e:
rietveld_event_handler.evt_change_gss_mode(None)
|
Refactor rietveld.event_handler test to use pytest-qt
|
Refactor rietveld.event_handler test to use pytest-qt
|
Python
|
mit
|
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
|
from __future__ import absolute_import, print_function
- import unittest
+ import pytest
- from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
+ @pytest.fixture
+ def rietveld_event_handler(qtbot):
+ return event_handler
- class RietveldEventHandlerTests(unittest.TestCase):
- def setUp(self):
- self.main_window = QApplication([])
-
- '''
- def tearDown(self):
- self.main_window.quit()
- '''
-
- def test_evt_change_gss_mode_exception(self):
- """Test we can extract a bank id from bank workspace name"""
- f = event_handler.evt_change_gss_mode
- self.assertRaises(NotImplementedError, f, None)
- if __name__ == '__main__':
- unittest.main() # pragma: no cover
+ def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler):
+ """Test we can extract a bank id from bank workspace name"""
+ with pytest.raises(NotImplementedError) as e:
+ rietveld_event_handler.evt_change_gss_mode(None)
|
Refactor rietveld.event_handler test to use pytest-qt
|
## Code Before:
from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode_exception(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
## Instruction:
Refactor rietveld.event_handler test to use pytest-qt
## Code After:
from __future__ import absolute_import, print_function
import pytest
from addie.rietveld import event_handler
@pytest.fixture
def rietveld_event_handler(qtbot):
return event_handler
def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler):
"""Test we can extract a bank id from bank workspace name"""
with pytest.raises(NotImplementedError) as e:
rietveld_event_handler.evt_change_gss_mode(None)
|
from __future__ import absolute_import, print_function
- import unittest
? ^^^^
+ import pytest
? ^^
- from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
+ @pytest.fixture
+ def rietveld_event_handler(qtbot):
+ return event_handler
- class RietveldEventHandlerTests(unittest.TestCase):
- def setUp(self):
- self.main_window = QApplication([])
-
- '''
- def tearDown(self):
- self.main_window.quit()
- '''
-
- def test_evt_change_gss_mode_exception(self):
- """Test we can extract a bank id from bank workspace name"""
- f = event_handler.evt_change_gss_mode
- self.assertRaises(NotImplementedError, f, None)
- if __name__ == '__main__':
- unittest.main() # pragma: no cover
+ def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler):
+ """Test we can extract a bank id from bank workspace name"""
+ with pytest.raises(NotImplementedError) as e:
+ rietveld_event_handler.evt_change_gss_mode(None)
|
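The refactored test above binds the pytest.raises context as e but never uses it; a hedged generic sketch (unrelated to addie) of how that captured ExceptionInfo is normally inspected:

import pytest

def divide(a, b):
    if b == 0:
        raise ZeroDivisionError('b must be nonzero')
    return a / b

def test_divide_error_message():
    with pytest.raises(ZeroDivisionError) as excinfo:
        divide(1, 0)
    assert 'nonzero' in str(excinfo.value)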
235f8061caa667f7c9bc1f424e14326c22932547
|
Examples/Infovis/Python/cone_layout.py
|
Examples/Infovis/Python/cone_layout.py
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
reader.Update()
print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
Remove errant printout in python cone layout example.
|
ENH: Remove errant printout in python cone layout example.
|
Python
|
bsd-3-clause
|
daviddoria/PointGraphsPhase1,jmerkow/VTK,mspark93/VTK,hendradarwin/VTK,aashish24/VTK-old,Wuteyan/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,msmolens/VTK,ashray/VTK-EVM,msmolens/VTK,hendradarwin/VTK,sumedhasingla/VTK,ashray/VTK-EVM,johnkit/vtk-dev,sumedhasingla/VTK,SimVascular/VTK,sankhesh/VTK,candy7393/VTK,jmerkow/VTK,spthaolt/VTK,candy7393/VTK,mspark93/VTK,mspark93/VTK,spthaolt/VTK,msmolens/VTK,Wuteyan/VTK,gram526/VTK,jeffbaumes/jeffbaumes-vtk,biddisco/VTK,sumedhasingla/VTK,demarle/VTK,aashish24/VTK-old,gram526/VTK,sankhesh/VTK,candy7393/VTK,cjh1/VTK,keithroe/vtkoptix,ashray/VTK-EVM,keithroe/vtkoptix,naucoin/VTKSlicerWidgets,biddisco/VTK,cjh1/VTK,msmolens/VTK,jeffbaumes/jeffbaumes-vtk,ashray/VTK-EVM,arnaudgelas/VTK,jeffbaumes/jeffbaumes-vtk,spthaolt/VTK,spthaolt/VTK,sankhesh/VTK,candy7393/VTK,keithroe/vtkoptix,spthaolt/VTK,SimVascular/VTK,collects/VTK,sankhesh/VTK,gram526/VTK,ashray/VTK-EVM,sumedhasingla/VTK,demarle/VTK,Wuteyan/VTK,jmerkow/VTK,candy7393/VTK,msmolens/VTK,berendkleinhaneveld/VTK,spthaolt/VTK,biddisco/VTK,SimVascular/VTK,collects/VTK,daviddoria/PointGraphsPhase1,jmerkow/VTK,hendradarwin/VTK,demarle/VTK,msmolens/VTK,biddisco/VTK,SimVascular/VTK,sumedhasingla/VTK,Wuteyan/VTK,johnkit/vtk-dev,msmolens/VTK,sankhesh/VTK,cjh1/VTK,aashish24/VTK-old,naucoin/VTKSlicerWidgets,keithroe/vtkoptix,johnkit/vtk-dev,sankhesh/VTK,jeffbaumes/jeffbaumes-vtk,daviddoria/PointGraphsPhase1,demarle/VTK,berendkleinhaneveld/VTK,cjh1/VTK,demarle/VTK,msmolens/VTK,aashish24/VTK-old,keithroe/vtkoptix,daviddoria/PointGraphsPhase1,biddisco/VTK,berendkleinhaneveld/VTK,naucoin/VTKSlicerWidgets,berendkleinhaneveld/VTK,cjh1/VTK,collects/VTK,johnkit/vtk-dev,naucoin/VTKSlicerWidgets,SimVascular/VTK,SimVascular/VTK,johnkit/vtk-dev,gram526/VTK,Wuteyan/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,ashray/VTK-EVM,candy7393/VTK,jmerkow/VTK,keithroe/vtkoptix,arnaudgelas/VTK,sumedhasingla/VTK,naucoin/VTKSlicerWidgets,arnaudgelas/VTK,daviddoria/PointGraphsPhase1,johnkit/vtk-dev,gram526/VTK,gram526/VTK,jmerkow/VTK,sumedhasingla/VTK,arnaudgelas/VTK,hendradarwin/VTK,spthaolt/VTK,gram526/VTK,cjh1/VTK,biddisco/VTK,collects/VTK,berendkleinhaneveld/VTK,sankhesh/VTK,demarle/VTK,sumedhasingla/VTK,aashish24/VTK-old,aashish24/VTK-old,biddisco/VTK,mspark93/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,mspark93/VTK,candy7393/VTK,candy7393/VTK,jmerkow/VTK,collects/VTK,keithroe/vtkoptix,keithroe/vtkoptix,Wuteyan/VTK,mspark93/VTK,collects/VTK,gram526/VTK,johnkit/vtk-dev,SimVascular/VTK,arnaudgelas/VTK,ashray/VTK-EVM,mspark93/VTK,hendradarwin/VTK,Wuteyan/VTK,mspark93/VTK,berendkleinhaneveld/VTK,SimVascular/VTK,naucoin/VTKSlicerWidgets,demarle/VTK,arnaudgelas/VTK,sankhesh/VTK,demarle/VTK,ashray/VTK-EVM,daviddoria/PointGraphsPhase1
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
- reader.Update()
- print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
Remove errant printout in python cone layout example.
|
## Code Before:
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
reader.Update()
print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
## Instruction:
Remove errant printout in python cone layout example.
## Code After:
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
- reader.Update()
- print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
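Background for the change above: VTK's pipeline is demand-driven, so the view pulls the reader's output when the window renders and an explicit Update() is redundant. It is only needed to inspect output before rendering, as in this hypothetical debugging sketch:

from vtk import vtkXMLTreeReader

reader = vtkXMLTreeReader()
reader.SetFileName('vtkclasses.xml')
reader.Update()                                  # force the pipeline to run now
print(reader.GetOutput().GetNumberOfVertices())  # inspect the tree early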
55d4b7b939fc218d47a920761350aee7bee91eb9
|
opps/article/views.py
|
opps/article/views.py
|
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
|
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
|
Fix template name on entry home page (/) on list page
|
Fix template name on entry home page (/) on list page
|
Python
|
mit
|
opps/opps,williamroot/opps,williamroot/opps,opps/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps
|
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
+ long_slug = self.kwargs.get('channel__long_slug', 'home')
- return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
+ return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
|
Fix template name on entry home page (/) on list page
|
## Code Before:
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
## Instruction:
Fix template name on entry home page (/) on list page
## Code After:
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
|
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
+ long_slug = self.kwargs.get('channel__long_slug', 'home')
- return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
? ---------------------- --
+ return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
|
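The fix in the record above hinges on dict.get's second (default) argument; a trivially runnable illustration:

kwargs = {}  # no 'channel__long_slug' captured from the URL
long_slug = kwargs.get('channel__long_slug', 'home')
print('channel/{0}.html'.format(long_slug))  # channel/home.html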
54cfb9864256b27b9f4cd411f170cc12d47727e5
|
appengine/components/components/machine_provider/dimensions.py
|
appengine/components/components/machine_provider/dimensions.py
|
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
VSPHERE = 2
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
Add enum field for vSphere backend
|
Add enum field for vSphere backend
Review-Url: https://codereview.chromium.org/1997903002
|
Python
|
apache-2.0
|
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
|
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
+ VSPHERE = 2
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
Add enum field for vSphere backend
|
## Code Before:
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
## Instruction:
Add enum field for vSphere backend
## Code After:
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
VSPHERE = 2
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
+ VSPHERE = 2
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
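A hedged sketch of the new enum value in use (protorpc Enum members expose .name and .number; the class is restated from the record):

from protorpc import messages

class Backend(messages.Enum):
    DUMMY = 0
    GCE = 1
    VSPHERE = 2

print(Backend.VSPHERE.name, Backend.VSPHERE.number)  # VSPHERE 2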
6c870e242914d40601bf7ad24e48af2b0d28559e
|
notification/urls.py
|
notification/urls.py
|
from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
Change to work with django 1.7
|
Change to work with django 1.7
|
Python
|
mit
|
daniell/django-notification,daniell/django-notification
|
+ try:
- from django.conf.urls.defaults import *
+ from django.conf.urls.defaults import *
+ except ImportError:
+ from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
Change to work with django 1.7
|
## Code Before:
from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
## Instruction:
Change to work with django 1.7
## Code After:
try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
+ try:
- from django.conf.urls.defaults import *
+ from django.conf.urls.defaults import *
? ++++
+ except ImportError:
+ from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
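The record above uses the standard try/except ImportError shim for cross-version imports; the same pattern in a generic, self-contained form:

try:
    import simplejson as json  # optional faster implementation, if installed
except ImportError:
    import json  # standard-library fallback

print(json.dumps({'ok': True}))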
1583aaf429e252f32439759e1363f3908efa0b03
|
tasks.py
|
tasks.py
|
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
pid = single_image_raspistill(filename='static/snap.jpg')
|
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
pid = single_image_raspistill(filename='static/snap.jpg', exp=120000000)
|
Set a higher default for darks
|
Set a higher default for darks
|
Python
|
mit
|
zemogle/raspberrysky
|
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
- pid = single_image_raspistill(filename='static/snap.jpg')
+ pid = single_image_raspistill(filename='static/snap.jpg', exp=120000000)
|
Set a higher default for darks
|
## Code Before:
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
pid = single_image_raspistill(filename='static/snap.jpg')
## Instruction:
Set a higher default for darks
## Code After:
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
pid = single_image_raspistill(filename='static/snap.jpg', exp=120000000)
|
from celery import Celery
from allsky import single_image_raspistill
celery = Celery('tasks', broker='redis://localhost:6379/0', backend='redis://localhost:6379/0')
@celery.task
def background_task():
# some long running task here (this simple example has no output)
- pid = single_image_raspistill(filename='static/snap.jpg')
+ pid = single_image_raspistill(filename='static/snap.jpg', exp=120000000)
? +++++++++++++++
|
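Context for the magic number above: raspistill expresses shutter/exposure times in microseconds, so, assuming the allsky wrapper passes exp straight through to raspistill (not verified here), the new default is a 120-second dark exposure:

exp_us = 120000000     # value passed to single_image_raspistill above
print(exp_us / 1e6)    # 120.0 seconds, i.e. a 2-minute exposure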
fe768f5d8c1081f69acd8cf656aa618da7caf93b
|
cbpos/mod/currency/views/config.py
|
cbpos/mod/currency/views/config.py
|
from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
Handle unset default currency better
|
Handle unset default currency better
|
Python
|
mit
|
coinbox/coinbox-mod-currency
|
from PySide import QtGui
import cbpos
+ import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
- default_id = cbpos.config['mod.currency', 'default']
+ default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
Handle unset default currency better
|
## Code Before:
from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
## Instruction:
Handle unset default currency better
## Code After:
from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
from PySide import QtGui
import cbpos
+ import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
- default_id = cbpos.config['mod.currency', 'default']
+ default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
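One pattern worth noting in the record above: addItem(display, c) stores the whole Currency object as item data, which update() reads back via itemData(). A hedged generic sketch with plain strings in place of ORM objects (requires PySide to run):

import sys
from PySide import QtGui

app = QtGui.QApplication(sys.argv)
combo = QtGui.QComboBox()
combo.addItem('US Dollar', 'USD')  # display text plus attached data
combo.addItem('Euro', 'EUR')
combo.setCurrentIndex(1)
print(combo.itemData(combo.currentIndex()))  # 'EUR'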
f0118092290355b486ed1c524c7f41cdb7c5697e
|
server.py
|
server.py
|
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
|
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
#Create the game class to hold information about any particular game
class Game(object):
#Constructor
def __init__(self, player0, gameIndex):
#Set the initial positions of each player
self.p1x = 0
self.p1y = 0
self.p2x = 550
self.p2y = 0
#Store the network channel of each client
self.player0 = player0
self.player1 = None
#Set the game id
self.gameID = gameIndex
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
|
Create a game class to store information about an instantiated game object
|
Create a game class to store information about an instantiated game object
|
Python
|
mit
|
thebillington/pygame_multiplayer_server
|
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
-
+
+ #Create the game class to hold information about any particular game
+ class Game(object):
+
+ #Constructor
+ def __init__(self, player0, gameIndex):
+
+ #Set the initial positions of each player
+ self.p1x = 0
+ self.p1y = 0
+ self.p2x = 550
+ self.p2y = 0
+
+ #Store the network channel of each client
+ self.player0 = player0
+ self.player1 = None
+
+ #Set the game id
+ self.gameID = gameIndex
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
|
Create a game class to store information about an instantiated game object
|
## Code Before:
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
## Instruction:
Create a game class to store information about an instantiated game object
## Code After:
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
#Create the game class to hold information about any particular game
class Game(object):
#Constructor
def __init__(self, player0, gameIndex):
#Set the initial positions of each player
self.p1x = 0
self.p1y = 0
self.p2x = 550
self.p2y = 0
#Store the network channel of each client
self.player0 = player0
self.player1 = None
#Set the game id
self.gameID = gameIndex
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
|
from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
#Create a function that will respond to every request from the client
def Network(self, data):
#Print the contents of the packet
print(data)
#Create a new server for our game
class GameServer(Server):
#Set the channel to deal with incoming requests
channelClass = ClientChannel
#Constructor to initialize the server objects
def __init__(self, *args, **kwargs):
#Call the super constructor
Server.__init__(self, *args, **kwargs)
#Create the objects to hold our game ID and list of running games
self.games = []
self.queue = None
self.gameIndex = 0
#Function to deal with new connections
def Connected(self, channel, addr):
print("New connection: {}".format(channel))
-
+
+ #Create the game class to hold information about any particular game
+ class Game(object):
+
+ #Constructor
+ def __init__(self, player0, gameIndex):
+
+ #Set the initial positions of each player
+ self.p1x = 0
+ self.p1y = 0
+ self.p2x = 550
+ self.p2y = 0
+
+ #Store the network channel of each client
+ self.player0 = player0
+ self.player1 = None
+
+ #Set the game id
+ self.gameID = gameIndex
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
print("Server starting on LOCALHOST...\n")
#Create a server
s = GameServer()
#Pump the server at regular intervals (check for new requests)
while True:
s.Pump()
sleep(0.0001)
|
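The new Game class holds per-match state, but this commit does not yet wire it into the server. The snippet below sketches one way the queue and games list could be used to pair clients; the method name and its logic are invented for illustration and are not part of the repository.
# Hypothetical pairing helper for the server above; not part of the commit.
def match_player(self, channel):
    if self.queue is None:
        # First player waits in the queue until an opponent connects.
        self.queue = Game(channel, self.gameIndex)
        self.gameIndex += 1
    else:
        # Second player fills the open slot and the match begins.
        self.queue.player1 = channel
        self.games.append(self.queue)
        self.queue = None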
26ffa0cdd1389e2a364531cd20e9f37ee1565cce
|
base/view_utils.py
|
base/view_utils.py
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
Use 'o' as the order by parameter in clean_query_string
|
Use 'o' as the order by parameter in clean_query_string
|
Python
|
mit
|
magnet-cl/django-project-template-py3,Angoreher/xcero,Angoreher/xcero,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,Angoreher/xcero,Angoreher/xcero
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
- (k, v) for k, v in request.GET.items() if not k.startswith('o')
+ (k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
Use 'o' as the order by parameter in clean_query_string
|
## Code Before:
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
## Instruction:
Use 'o' as the order by parameter in clean_query_string
## Code After:
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
- (k, v) for k, v in request.GET.items() if not k.startswith('o')
? ^^^ ^^^^^^^^^^^^^ -
+ (k, v) for k, v in request.GET.items() if k != 'o'
? ^ ^^^
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
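The two predicates only differ for keys that merely begin with 'o'. The standalone check below uses a plain dict in place of request.GET to make the behavior change concrete; it needs no Django to run.
# Old vs. new filter on a query string that contains an 'order_status' key.
params = {'o': 'name', 'order_status': 'open', 'p': '2', 'q': 'shoes'}
old_kept = dict((k, v) for k, v in params.items() if not k.startswith('o'))
new_kept = dict((k, v) for k, v in params.items() if k != 'o')
assert 'order_status' not in old_kept      # dropped by accident before the fix
assert new_kept['order_status'] == 'open'  # preserved after the fix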
c9da64ac1c90abdee8fc72488a4bef58a95aa7c6
|
biwako/bin/fields/compounds.py
|
biwako/bin/fields/compounds.py
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
return values
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
Fix List to use the new decoding system
|
Fix List to use the new decoding system
|
Python
|
bsd-3-clause
|
gulopine/steel
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
- instance_field = field.for_instance(self.instance)
+ instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
- return values
+ raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
Fix List to use the new decoding system
|
## Code Before:
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
return values
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
## Instruction:
Fix List to use the new decoding system
## Code After:
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
- instance_field = field.for_instance(self.instance)
+ instance_field = self.field.for_instance(self.instance)
? +++++
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
- return values
+ raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
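After the fix, both fields signal completion by raising FullyDecoded instead of returning a bare value. The toy snippet below isolates that control flow; the class and helper are stand-ins written for illustration, not the biwako API.
# Stand-in for the exception-based decoding protocol used above: read() raises
# with both the raw bytes and the decoded value, and callers catch it.
class FullyDecoded(Exception):
    def __init__(self, bytes, value):
        self.bytes = bytes
        self.value = value
def read_value(field, file):
    try:
        raw = field.read(file)
    except FullyDecoded as decoded:
        return decoded.bytes, decoded.value
    # Fields that return normally hand back raw bytes to decode separately.
    return raw, field.decode(raw)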
24a0fcbaea3bca88278f294f41e4b6abd1e82cf3
|
src/rocommand/__init__.py
|
src/rocommand/__init__.py
|
__version__ = "0.2.6" # Enhacements to handling of directories and external references
|
__version__ = "0.2.7" # Decouple MINIM constraints from target RO
# ROSRS (v6) support, support evaluation of RODL/ROSRS objects
# new annotation and linking options, annotations with CURIE (QName) properties
# add ro remove command, fix URI escaping problems
|
Add comments summarizing changes in this version
|
Add comments summarizing changes in this version
|
Python
|
mit
|
wf4ever/ro-manager,wf4ever/ro-manager,wf4ever/ro-manager,wf4ever/ro-manager
|
- __version__ = "0.2.6" # Enhancements to handling of directories and external references
+ __version__ = "0.2.7" # Decouple MINIM constraints from target RO
+ # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
+ # new annotation and linking options, annotations with CURIE (QName) properties
+ # add ro remove command, fix URI escaping problems
+
|
Add comments summarizing changes in this version
|
## Code Before:
__version__ = "0.2.6" # Enhacements to handling of directories and external references
## Instruction:
Add comments summarizing changes in this version
## Code After:
__version__ = "0.2.7" # Decouple MINIM constraints from target RO
# ROSRS (v6) support, support evaluation of RODL/ROSRS objects
# new annotation and linking options, annotations with CURIE (QName) properties
# add ro remove command, fix URI escaping problems
|
- __version__ = "0.2.6" # Enhancements to handling of directories and external references
+ __version__ = "0.2.7" # Decouple MINIM constraints from target RO
+ # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
+ # new annotation and linking options, annotations with CURIE (QName) properties
+ # add ro remove command, fix URI escaping problems
+
|
88e5ecad9966057203a9cbecaeaecdca3e76b6da
|
tests/fake_filesystem.py
|
tests/fake_filesystem.py
|
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
|
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
def close(self):
"""
Always hold fake files open.
"""
pass
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
|
Define noop close() for FakeFile
|
Define noop close() for FakeFile
|
Python
|
bsd-2-clause
|
kxxoling/fabric,rodrigc/fabric,qinrong/fabric,elijah513/fabric,bspink/fabric,MjAbuz/fabric,cmattoon/fabric,hrubi/fabric,felix-d/fabric,askulkarni2/fabric,SamuelMarks/fabric,mathiasertl/fabric,tekapo/fabric,StackStorm/fabric,ploxiln/fabric,kmonsoor/fabric,raimon49/fabric,haridsv/fabric,bitprophet/fabric,fernandezcuesta/fabric,itoed/fabric,rane-hs/fabric-py3,sdelements/fabric,likesxuqiang/fabric,bitmonk/fabric,getsentry/fabric,opavader/fabric,jaraco/fabric,xLegoz/fabric,TarasRudnyk/fabric,pgroudas/fabric,akaariai/fabric,rbramwell/fabric,amaniak/fabric,cgvarela/fabric,tolbkni/fabric,pashinin/fabric
|
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
+ def close(self):
+ """
+ Always hold fake files open.
+ """
+ pass
+
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
|
Define noop close() for FakeFile
|
## Code Before:
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
## Instruction:
Define noop close() for FakeFile
## Code After:
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
def close(self):
"""
Always hold fake files open.
"""
pass
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
|
import os
import stat
from StringIO import StringIO
from types import StringTypes
import paramiko as ssh
class FakeFile(StringIO):
def __init__(self, value=None, path=None):
init = lambda x: StringIO.__init__(self, x)
if value is None:
init("")
ftype = 'dir'
size = 4096
else:
init(value)
ftype = 'file'
size = len(value)
attr = ssh.SFTPAttributes()
attr.st_mode = {'file': stat.S_IFREG, 'dir': stat.S_IFDIR}[ftype]
attr.st_size = size
attr.filename = os.path.basename(path)
self.attributes = attr
def __str__(self):
return self.getvalue()
def write(self, value):
StringIO.write(self, value)
self.attributes.st_size = len(self.getvalue())
+ def close(self):
+ """
+ Always hold fake files open.
+ """
+ pass
+
class FakeFilesystem(dict):
def __init__(self, d=None):
# Replicate input dictionary using our custom __setitem__
d = d or {}
for key, value in d.iteritems():
self[key] = value
def __setitem__(self, key, value):
if isinstance(value, StringTypes) or value is None:
value = FakeFile(value, key)
super(FakeFilesystem, self).__setitem__(key, value)
|
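The override exists so buffered test data stays readable after code under test closes the handle. A quick demonstration, assuming the FakeFile class above is importable:
# A real StringIO frees its buffer on close(); FakeFile deliberately does not,
# so later assertions can still read what was written.
f = FakeFile("remote contents", "/tmp/fake")
f.close()
assert str(f) == "remote contents"  # a real close() would make getvalue() raise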
b35e8fa3cde243aa444aa056d60f7a37b61e825b
|
tests/commands/test_usage.py
|
tests/commands/test_usage.py
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
Add usage tests for check-ufo-sources and check-specification
|
Add usage tests for check-ufo-sources and check-specification
|
Python
|
apache-2.0
|
googlefonts/fontbakery,googlefonts/fontbakery,graphicore/fontbakery,graphicore/fontbakery,moyogo/fontbakery,graphicore/fontbakery,moyogo/fontbakery,moyogo/fontbakery,googlefonts/fontbakery
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
- """Tests if the output from running `fontbakery --list-subcommands` matches
+ """Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
- import fontbakery.commands
+ import fontbakery.commands
- commands_dir = os.path.dirname(fontbakery.commands.__file__)
+ commands_dir = os.path.dirname(fontbakery.commands.__file__)
- scripts = [
+ scripts = [
- f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
+ f.rstrip(".py").replace("_", "-")
+ for f in os.listdir(commands_dir)
- if (f.endswith(".py") and not f.startswith('_'))
+ if (f.endswith(".py") and not f.startswith('_'))
- ]
+ ]
- subcommands = subprocess.check_output(['fontbakery',
+ subcommands = subprocess.check_output(['fontbakery',
- '--list-subcommands']).split()
+ '--list-subcommands']).split()
- assert sorted(scripts) == sorted(subcommands)
+ assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
- """Test if `fontbakery check-googlefonts` can run successfully`."""
+ """Test if `fontbakery check-googlefonts` can run successfully`."""
- subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
+ subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
- with pytest.raises(subprocess.CalledProcessError):
+ with pytest.raises(subprocess.CalledProcessError):
- subprocess.check_output(["fontbakery", "check-googlefonts"])
+ subprocess.check_output(["fontbakery", "check-googlefonts"])
+
+ def test_command_check_specification():
+ """Test if `fontbakery check-specification` can run successfully`."""
+ subprocess.check_output(["fontbakery", "check-specification", "-h"])
+
+ with pytest.raises(subprocess.CalledProcessError):
+ subprocess.check_output(["fontbakery", "check-specification"])
+
+
+ def test_command_check_ufo_sources():
+ """Test if `fontbakery check-ufo-sources` can run successfully`."""
+ subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
+
+ with pytest.raises(subprocess.CalledProcessError):
+ subprocess.check_output(["fontbakery", "check-ufo-sources"])
+
|
Add usage tests for check-ufo-sources and check-specification
|
## Code Before:
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
## Instruction:
Add usage tests for check-ufo-sources and check-specification
## Code After:
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
- """Tests if the output from running `fontbakery --list-subcommands` matches
? --
+ """Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
- import fontbakery.commands
? --
+ import fontbakery.commands
- commands_dir = os.path.dirname(fontbakery.commands.__file__)
? --
+ commands_dir = os.path.dirname(fontbakery.commands.__file__)
- scripts = [
? --
+ scripts = [
- f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
+ f.rstrip(".py").replace("_", "-")
+ for f in os.listdir(commands_dir)
- if (f.endswith(".py") and not f.startswith('_'))
? --
+ if (f.endswith(".py") and not f.startswith('_'))
- ]
? --
+ ]
- subcommands = subprocess.check_output(['fontbakery',
? --
+ subcommands = subprocess.check_output(['fontbakery',
- '--list-subcommands']).split()
? --
+ '--list-subcommands']).split()
- assert sorted(scripts) == sorted(subcommands)
? --
+ assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
- """Test if `fontbakery check-googlefonts` can run successfully`."""
? --
+ """Test if `fontbakery check-googlefonts` can run successfully`."""
- subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
? --
+ subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
- with pytest.raises(subprocess.CalledProcessError):
? --
+ with pytest.raises(subprocess.CalledProcessError):
- subprocess.check_output(["fontbakery", "check-googlefonts"])
? ----
+ subprocess.check_output(["fontbakery", "check-googlefonts"])
+
+
+ def test_command_check_specification():
+ """Test if `fontbakery check-specification` can run successfully`."""
+ subprocess.check_output(["fontbakery", "check-specification", "-h"])
+
+ with pytest.raises(subprocess.CalledProcessError):
+ subprocess.check_output(["fontbakery", "check-specification"])
+
+
+ def test_command_check_ufo_sources():
+ """Test if `fontbakery check-ufo-sources` can run successfully`."""
+ subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
+
+ with pytest.raises(subprocess.CalledProcessError):
+ subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
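The three usage tests share one shape. If more subcommands accumulate, a parametrized variant keeps the file flat; the sketch below is a possible consolidation, not part of this commit.
# Hypothetical parametrized version of the repeated subcommand tests.
import subprocess
import pytest
@pytest.mark.parametrize('subcommand', [
    'check-googlefonts',
    'check-specification',
    'check-ufo-sources',
])
def test_subcommand_usage(subcommand):
    subprocess.check_output(['fontbakery', subcommand, '-h'])
    with pytest.raises(subprocess.CalledProcessError):
        subprocess.check_output(['fontbakery', subcommand])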
e42d38f9ad3f8b5229c9618e4dd9d6b371de89c5
|
test/test_am_bmi.py
|
test/test_am_bmi.py
|
import unittest
import utils
import os
import sys
import shutil
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(os.path.join(TOPDIR, 'lib'))
sys.path.append(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
if __name__ == '__main__':
unittest.main()
|
import unittest
import utils
import os
import sys
import shutil
import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
def test_main(self):
"""Test simple complete run of am_bmi"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input',
'pm.pdb.B10010001.pdb'), '.')
with open('SnapList.txt', 'w') as fh:
fh.write("pm.pdb.B10010001.pdb -100.0\n")
fh.write("high-energy.pdb -10.0\n")
subprocess.check_call(['cryptosite', 'am_bmi'])
with open('am_features.out') as fh:
lines = sorted(fh.readlines())
self.assertEqual(len(lines), 12)
self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
if __name__ == '__main__':
unittest.main()
|
Test simple complete run of am_bmi.
|
Test simple complete run of am_bmi.
|
Python
|
lgpl-2.1
|
salilab/cryptosite,salilab/cryptosite,salilab/cryptosite
|
import unittest
import utils
import os
import sys
import shutil
+ import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+ utils.set_search_paths(TOPDIR)
- sys.path.append(os.path.join(TOPDIR, 'lib'))
- sys.path.append(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
+ def test_main(self):
+ """Test simple complete run of am_bmi"""
+ with utils.temporary_working_directory() as tmpdir:
+ shutil.copy(os.path.join(TOPDIR, 'test', 'input',
+ 'pm.pdb.B10010001.pdb'), '.')
+ with open('SnapList.txt', 'w') as fh:
+ fh.write("pm.pdb.B10010001.pdb -100.0\n")
+ fh.write("high-energy.pdb -10.0\n")
+ subprocess.check_call(['cryptosite', 'am_bmi'])
+ with open('am_features.out') as fh:
+ lines = sorted(fh.readlines())
+ self.assertEqual(len(lines), 12)
+ self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
+
if __name__ == '__main__':
unittest.main()
|
Test simple complete run of am_bmi.
|
## Code Before:
import unittest
import utils
import os
import sys
import shutil
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(os.path.join(TOPDIR, 'lib'))
sys.path.append(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
if __name__ == '__main__':
unittest.main()
## Instruction:
Test simple complete run of am_bmi.
## Code After:
import unittest
import utils
import os
import sys
import shutil
import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
def test_main(self):
"""Test simple complete run of am_bmi"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input',
'pm.pdb.B10010001.pdb'), '.')
with open('SnapList.txt', 'w') as fh:
fh.write("pm.pdb.B10010001.pdb -100.0\n")
fh.write("high-energy.pdb -10.0\n")
subprocess.check_call(['cryptosite', 'am_bmi'])
with open('am_features.out') as fh:
lines = sorted(fh.readlines())
self.assertEqual(len(lines), 12)
self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
if __name__ == '__main__':
unittest.main()
|
import unittest
import utils
import os
import sys
import shutil
+ import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+ utils.set_search_paths(TOPDIR)
- sys.path.append(os.path.join(TOPDIR, 'lib'))
- sys.path.append(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
+ def test_main(self):
+ """Test simple complete run of am_bmi"""
+ with utils.temporary_working_directory() as tmpdir:
+ shutil.copy(os.path.join(TOPDIR, 'test', 'input',
+ 'pm.pdb.B10010001.pdb'), '.')
+ with open('SnapList.txt', 'w') as fh:
+ fh.write("pm.pdb.B10010001.pdb -100.0\n")
+ fh.write("high-energy.pdb -10.0\n")
+ subprocess.check_call(['cryptosite', 'am_bmi'])
+ with open('am_features.out') as fh:
+ lines = sorted(fh.readlines())
+ self.assertEqual(len(lines), 12)
+ self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
+
if __name__ == '__main__':
unittest.main()
|
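Both tests lean on utils.temporary_working_directory(), which this record never shows. The sketch below is one plausible minimal implementation and is only an assumption about the project's actual utils module.
# Hypothetical helper matching the usage above: run the enclosed block inside
# a throwaway directory, then restore the old cwd and delete the directory.
import contextlib
import os
import shutil
import tempfile
@contextlib.contextmanager
def temporary_working_directory():
    tmpdir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    os.chdir(tmpdir)
    try:
        yield tmpdir
    finally:
        os.chdir(old_cwd)
        shutil.rmtree(tmpdir)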
35f45d3fcee5a1fe9d6d5ce71b708d0bc68db3fc
|
python/matasano/set1/c7.py
|
python/matasano/set1/c7.py
|
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64_to_bytes(chal_file.read()).encode('latin-1')
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
Switch to using base64 builtin decoder for simplicity.
|
Switch to using base64 builtin decoder for simplicity.
|
Python
|
mit
|
TheLunchtimeAttack/matasano-challenges,TheLunchtimeAttack/matasano-challenges
|
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
- ct = base64_to_bytes(chal_file.read()).encode('latin-1')
+ ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
Switch to using base64 builtin decoder for simplicity.
|
## Code Before:
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64_to_bytes(chal_file.read()).encode('latin-1')
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
## Instruction:
Switch to using base64 builtin decoder for simplicity.
## Code After:
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
from matasano.util.converters import base64_to_bytes
from Crypto.Cipher import AES
import base64
if __name__ == "__main__":
chal_file = open("matasano/data/c7.txt", 'r');
key = "YELLOW SUBMARINE"
# Instantiate the cipher
cipher = AES.new(key, AES.MODE_ECB)
# Convert from base64 to bytes and encode
- ct = base64_to_bytes(chal_file.read()).encode('latin-1')
+ ct = base64.b64decode(chal_file.read())
# Perform the decryption
pt = cipher.decrypt(ct)
print(pt.decode())
|
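For reference, the decode-then-decrypt step round-trips in a few lines once the builtin decoder is used. The snippet below is self-contained: the key matches the challenge, but the ciphertext is a dummy generated on the spot, and PyCrypto or PyCryptodome is assumed installed.
# Self-contained AES-ECB round trip using the builtin base64 decoder; the
# plaintext is a 16-byte placeholder, not challenge data.
import base64
from Crypto.Cipher import AES
key = b"YELLOW SUBMARINE"
ct_b64 = base64.b64encode(AES.new(key, AES.MODE_ECB).encrypt(b"sixteen byte msg"))
pt = AES.new(key, AES.MODE_ECB).decrypt(base64.b64decode(ct_b64))
assert pt == b"sixteen byte msg"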
33fd4bba1f2c44e871051862db8071fadb0e9825
|
core-plugins/shared/1/dss/reporting-plugins/shared_create_metaproject/shared_create_metaproject.py
|
core-plugins/shared/1/dss/reporting-plugins/shared_create_metaproject/shared_create_metaproject.py
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
metaprojectCode = parameters.get("metaprojectCode")
username = parameters.get("userName")
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
"Test",
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
metaprojectDescr = parameters.get("metaprojectDescr")
if metaprojectDescr is None:
metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
metaprojectDescr,
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
Create metaproject with user-provided description.
|
Create metaproject with user-provided description.
|
Python
|
apache-2.0
|
aarpon/obit_shared_core_technology,aarpon/obit_shared_core_technology,aarpon/obit_shared_core_technology
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
+ username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
- username = parameters.get("userName")
+ metaprojectDescr = parameters.get("metaprojectDescr")
+ if metaprojectDescr is None:
+ metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
- "Test",
+ metaprojectDescr,
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
Create metaproject with user-provided description.
|
## Code Before:
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
metaprojectCode = parameters.get("metaprojectCode")
username = parameters.get("userName")
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
"Test",
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
## Instruction:
Create metaproject with user-provided description.
## Code After:
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
metaprojectDescr = parameters.get("metaprojectDescr")
if metaprojectDescr is None:
metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
metaprojectDescr,
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
def process(transaction, parameters, tableBuilder):
"""Create a project with user-defined name in given space.
"""
# Prepare the return table
tableBuilder.addHeader("success")
tableBuilder.addHeader("message")
# Add a row for the results
row = tableBuilder.addRow()
# Retrieve parameters from client
+ username = parameters.get("userName")
metaprojectCode = parameters.get("metaprojectCode")
- username = parameters.get("userName")
+ metaprojectDescr = parameters.get("metaprojectDescr")
+ if metaprojectDescr is None:
+ metaprojectDescr = ""
# Try retrieving the metaproject (tag)
metaproject = transaction.getMetaproject(metaprojectCode, username)
if metaproject is None:
# Create the metaproject (tag)
metaproject = transaction.createNewMetaproject(metaprojectCode,
- "Test",
? ^^ ^^
+ metaprojectDescr,
? ^^^^^^^^^^^^ ^^
username)
# Check that creation was successful
if metaproject is None:
success = "false"
message = "Could not create metaproject " + metaprojectCode + "."
else:
success = "true"
message = "Tag " + metaprojectCode + " successfully created."
else:
success = "false"
message = "Tag " + metaprojectCode + " exists already."
# Add the results to current row
row.setCell("success", success)
row.setCell("message", message)
|
f56f98d5ec2b9cd689349cc239ca550f1182563e
|
src/olympia/core/tests/test_db.py
|
src/olympia/core/tests/test_db.py
|
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
|
import os
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
def test_no_duplicate_migration_ids():
seen = set()
migration_ids = [
fname.split('-')[0] for fname in os.listdir('src/olympia/migrations/')
if fname.endswith('.sql')]
duplicates = {x for x in migration_ids if x in seen or seen.add(x)}
assert not duplicates
|
Add simple test to fail in case of duplicate migration ids.
|
Add simple test to fail in case of duplicate migration ids.
The test fails by showing which migrations are duplicated.
```python
src/olympia/core/tests/test_db.py:29: in test_no_duplicate_migration_ids
assert not duplicates
E AssertionError: assert not {'99'}
```
Fixes #11852
|
Python
|
bsd-3-clause
|
eviljeff/olympia,eviljeff/olympia,psiinon/addons-server,eviljeff/olympia,psiinon/addons-server,psiinon/addons-server,bqbn/addons-server,diox/olympia,mozilla/addons-server,mozilla/olympia,bqbn/addons-server,wagnerand/addons-server,eviljeff/olympia,mozilla/olympia,wagnerand/addons-server,mozilla/olympia,wagnerand/addons-server,wagnerand/addons-server,psiinon/addons-server,mozilla/addons-server,bqbn/addons-server,mozilla/addons-server,diox/olympia,mozilla/olympia,mozilla/addons-server,diox/olympia,diox/olympia,bqbn/addons-server
|
+ import os
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
+
+ def test_no_duplicate_migration_ids():
+ seen = set()
+
+ migration_ids = [
+ fname.split('-')[0] for fname in os.listdir('src/olympia/migrations/')
+ if fname.endswith('.sql')]
+
+ duplicates = {x for x in migration_ids if x in seen or seen.add(x)}
+
+ assert not duplicates
+
|
Add simple test to fail in case of duplicate migration ids.
|
## Code Before:
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
## Instruction:
Add simple test to fail in case of duplicate migration ids.
## Code After:
import os
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
def test_no_duplicate_migration_ids():
seen = set()
migration_ids = [
fname.split('-')[0] for fname in os.listdir('src/olympia/migrations/')
if fname.endswith('.sql')]
duplicates = {x for x in migration_ids if x in seen or seen.add(x)}
assert not duplicates
|
+ import os
import pytest
from olympia.core.tests.db_tests_testapp.models import TestRegularCharField
@pytest.mark.django_db
@pytest.mark.parametrize('value', [
u'a',
u'🔍', # Magnifying Glass Tilted Left (U+1F50D)
u'❤', # Heavy Black Heart (U+2764, U+FE0F)
])
def test_max_length_utf8mb4(value):
TestRegularCharField.objects.create(name=value * 255)
assert TestRegularCharField.objects.get().name == value * 255
+
+
+ def test_no_duplicate_migration_ids():
+ seen = set()
+
+ migration_ids = [
+ fname.split('-')[0] for fname in os.listdir('src/olympia/migrations/')
+ if fname.endswith('.sql')]
+
+ duplicates = {x for x in migration_ids if x in seen or seen.add(x)}
+
+ assert not duplicates
|
f0b27af3cc09808146442c94df7c76127776acf8
|
gslib/devshell_auth_plugin.py
|
gslib/devshell_auth_plugin.py
|
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
Fix provider check causing Devshell auth failure
|
Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage.
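A minimal sketch of the distinction (hedged: this is not part of the commit,
and assumes boto is installed; attribute names follow boto.provider.Provider):
```python
from boto.provider import Provider

# boto hands the auth plugin a Provider object, so comparing it against
# the string 'gs' could never match.
provider = Provider('google')
print(provider.name)  # -> 'google', the value the new check compares against
# CloudApi objects, by contrast, carry the provider as a plain string ('gs').
```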
|
Python
|
apache-2.0
|
GoogleCloudPlatform/gsutil,GoogleCloudPlatform/gsutil,fishjord/gsutil,BrandonY/gsutil
|
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
+ # Provider here is a boto.provider.Provider object (as opposed to the
+ # provider attribute of CloudApi objects, which is a string).
- if provider != 'gs':
+ if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
Fix provider check causing Devshell auth failure
|
## Code Before:
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
## Instruction:
Fix provider check causing Devshell auth failure
## Code After:
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
+ # Provider here is a boto.provider.Provider object (as opposed to the
+ # provider attribute of CloudApi objects, which is a string).
- if provider != 'gs':
? ^
+ if provider.name != 'google':
? +++++ ^^^^^
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
e6af345239f2778a2245d9f8be54bf754224aafd
|
tests/helper.py
|
tests/helper.py
|
def mock_api(path, file_path):
from httmock import urlmatch, response
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
|
def mock_api(path, file_path, query = None, data = None):
from httmock import urlmatch, response
import json
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
assert query is None or url.query == query
assert data is None or json.loads(request.body) == data
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
|
Add assertion for query and data of request
|
Add assertion for query and data of request
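A hypothetical usage sketch (hedged: the endpoint, dump file name, and payload
below are illustrative and not taken from the repository's test suite):
```python
import requests
from httmock import HTTMock
from tests.helper import mock_api

# The mock now fails the test if the POSTed JSON body does not equal `data`.
mock = mock_api('/charges', 'charges_create.txt',
                data={'amount': 1000, 'currency': 'jpy'})
with HTTMock(mock):
    requests.post('https://api.webpay.jp/v1/charges',
                  data='{"amount": 1000, "currency": "jpy"}')
```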
|
Python
|
mit
|
yamaneko1212/webpay-python
|
- def mock_api(path, file_path):
+ def mock_api(path, file_path, query = None, data = None):
from httmock import urlmatch, response
+ import json
+
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
+ assert query is None or url.query == query
+ assert data is None or json.loads(request.body) == data
+
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
|
Add assertion for query and data of request
|
## Code Before:
def mock_api(path, file_path):
from httmock import urlmatch, response
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
## Instruction:
Add assertion for query and data of request
## Code After:
def mock_api(path, file_path, query = None, data = None):
from httmock import urlmatch, response
import json
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
assert query is None or url.query == query
assert data is None or json.loads(request.body) == data
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
|
- def mock_api(path, file_path):
+ def mock_api(path, file_path, query = None, data = None):
from httmock import urlmatch, response
+ import json
+
@urlmatch(scheme = 'https', netloc = 'api.webpay.jp', path = '/v1' + path)
def webpay_api_mock(url, request):
+ assert query is None or url.query == query
+ assert data is None or json.loads(request.body) == data
+
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status, content = body.encode('utf-8'), headers = headers, request = request)
return webpay_api_mock
|
f3e39d2250a9c56a2beb6a1a9c1c4dafb97e8c7f
|
encoder/vgg.py
|
encoder/vgg.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
Update to VGG load from datadir
|
Update to VGG load from datadir
|
Python
|
mit
|
MarvinTeichmann/KittiBox,MarvinTeichmann/KittiBox
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+ import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
- vgg_fcn = fcn8_vgg.FCN8VGG()
+ vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
+ vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
Update to VGG load from datadir
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
## Instruction:
Update to VGG load from datadir
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+ import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
      train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
- vgg_fcn = fcn8_vgg.FCN8VGG()
+ vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
+ vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
d8a7abd16e115e142299a4c1ed01b18b15a5b806
|
tests/test_hashring.py
|
tests/test_hashring.py
|
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
|
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
|
Add additional test for strings
|
Add additional test for strings
|
Python
|
bsd-2-clause
|
goller/hashring
|
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
+
+ def test_server_ring():
+ memcache_servers = ['192.168.0.246:11212',
+ '192.168.0.247:11212',
+ '192.168.0.249:11212']
+
+ ring = HashRing(memcache_servers)
+ actual = ring.get_node('my_key')
+ expected = '192.168.0.247:11212'
+ assert expected == actual
+
|
Add additional test for strings
|
## Code Before:
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
## Instruction:
Add additional test for strings
## Code After:
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
def test_server_ring():
memcache_servers = ['192.168.0.246:11212',
'192.168.0.247:11212',
'192.168.0.249:11212']
ring = HashRing(memcache_servers)
actual = ring.get_node('my_key')
expected = '192.168.0.247:11212'
assert expected == actual
|
from hashring import HashRing
def test_basic_ring():
hr = HashRing(range(3))
actual = hr.get_node('howdy')
expected = 1
assert expected == actual
+
+
+ def test_server_ring():
+ memcache_servers = ['192.168.0.246:11212',
+ '192.168.0.247:11212',
+ '192.168.0.249:11212']
+
+ ring = HashRing(memcache_servers)
+ actual = ring.get_node('my_key')
+ expected = '192.168.0.247:11212'
+ assert expected == actual
|
b16016994f20945a8a2bbb63b9cb920d856ab66f
|
web/attempts/migrations/0008_add_submission_date.py
|
web/attempts/migrations/0008_add_submission_date.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = (
SELECT max(history_date)
FROM attempts_historicalattempt
WHERE attempts_attempt.user_id = user_id
AND attempts_attempt.part_id = part_id
)
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = subquery.submission_date
FROM (
SELECT user_id, part_id, max(history_date) AS submission_date
FROM attempts_historicalattempt
GROUP BY user_id, part_id
) AS subquery
WHERE attempts_attempt.user_id = subquery.user_id
AND attempts_attempt.part_id = subquery.part_id
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
Revert "Make migration SQLite compatible"
|
Revert "Make migration SQLite compatible"
This reverts commit 768d85cccb17c8757dd8d14dad220d0b87568264.
|
Python
|
agpl-3.0
|
ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
- SET submission_date = (
+ SET submission_date = subquery.submission_date
- SELECT max(history_date)
+ FROM (
+ SELECT user_id, part_id, max(history_date) AS submission_date
FROM attempts_historicalattempt
+ GROUP BY user_id, part_id
+ ) AS subquery
- WHERE attempts_attempt.user_id = user_id
+ WHERE attempts_attempt.user_id = subquery.user_id
- AND attempts_attempt.part_id = part_id
+ AND attempts_attempt.part_id = subquery.part_id
- )
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
Revert "Make migration SQLite compatible"
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = (
SELECT max(history_date)
FROM attempts_historicalattempt
WHERE attempts_attempt.user_id = user_id
AND attempts_attempt.part_id = part_id
)
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
## Instruction:
Revert "Make migration SQLite compatible"
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = subquery.submission_date
FROM (
SELECT user_id, part_id, max(history_date) AS submission_date
FROM attempts_historicalattempt
GROUP BY user_id, part_id
) AS subquery
WHERE attempts_attempt.user_id = subquery.user_id
AND attempts_attempt.part_id = subquery.part_id
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
- SET submission_date = (
? ^
+ SET submission_date = subquery.submission_date
? ^^^^^^^^^^^^^^^^^^^^^^^^
- SELECT max(history_date)
+ FROM (
+ SELECT user_id, part_id, max(history_date) AS submission_date
FROM attempts_historicalattempt
+ GROUP BY user_id, part_id
+ ) AS subquery
- WHERE attempts_attempt.user_id = user_id
? -----
+ WHERE attempts_attempt.user_id = subquery.user_id
? +++++++++
- AND attempts_attempt.part_id = part_id
? -----
+ AND attempts_attempt.part_id = subquery.part_id
? +++++++++
- )
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
31691ca909fe0b1816d89bb4ccf69974eca882a6
|
allauth/app_settings.py
|
allauth/app_settings.py
|
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
for engine in settings.TEMPLATES:
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django import template
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
for engine in template.engines.templates.values():
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
Fix for checking the context processors on Django 1.8
|
Fix for checking the context processors on Django 1.8
If the user has not migrated their settings file to use the new TEMPLATES
method in Django 1.8, settings.TEMPLATES is an empty list.
Instead, if we check django.template.engines it will be populated with the
automatically migrated data from settings.TEMPLATE*.
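A short sketch of that idea (hedged: assumes a configured Django >= 1.8
project where django.template.engines is importable):
```python
from django import template

def has_context_processor(dotted_path):
    # engines.templates exposes the effective TEMPLATES configuration,
    # including settings auto-migrated from the legacy TEMPLATE_* names,
    # so it is populated even when settings.TEMPLATES is an empty list.
    for config in template.engines.templates.values():
        processors = config.get('OPTIONS', {}).get('context_processors', [])
        if dotted_path in processors:
            return True
    return False
```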
|
Python
|
mit
|
cudadog/django-allauth,bitcity/django-allauth,petersanchez/django-allauth,bittner/django-allauth,manran/django-allauth,jscott1989/django-allauth,petersanchez/django-allauth,bittner/django-allauth,manran/django-allauth,jscott1989/django-allauth,wli/django-allauth,italomaia/django-allauth,yarbelk/django-allauth,pankeshang/django-allauth,sih4sing5hong5/django-allauth,ZachLiuGIS/django-allauth,fabiocerqueira/django-allauth,aexeagmbh/django-allauth,dincamihai/django-allauth,italomaia/django-allauth,jscott1989/django-allauth,ankitjain87/django-allauth,neo/django-allauth,yarbelk/django-allauth,pankeshang/django-allauth,agriffis/django-allauth,bopo/django-allauth,bitcity/django-allauth,kingofsystem/django-allauth,pztrick/django-allauth,ashwoods/django-allauth,alacritythief/django-allauth,wli/django-allauth,zhangziang/django-allauth,concentricsky/django-allauth,willharris/django-allauth,github-account-because-they-want-it/django-allauth,pankeshang/django-allauth,agriffis/django-allauth,beswarm/django-allauth,manran/django-allauth,lmorchard/django-allauth,fabiocerqueira/django-allauth,italomaia/django-allauth,JshWright/django-allauth,lmorchard/django-allauth,hanasoo/django-allauth,aexeagmbh/django-allauth,github-account-because-they-want-it/django-allauth,erueloi/django-allauth,vuchau/django-allauth,bjorand/django-allauth,janusnic/django-allauth,janusnic/django-allauth,jscott1989/django-allauth,bitcity/django-allauth,kingofsystem/django-allauth,pztrick/django-allauth,ashwoods/django-allauth,alacritythief/django-allauth,wli/django-allauth,zhangziang/django-allauth,concentricsky/django-allauth,bopo/django-allauth,bitcity/django-allauth,kingofsystem/django-allauth,pztrick/django-allauth,ashwoods/django-allauth,rulz/django-allauth,davidrenne/django-allauth,beswarm/django-allauth,erueloi/django-allauth,wayward710/django-allauth,avsd/django-allauth,pranjalpatil/django-allauth,patricio-astudillo/django-allauth,80vs90/django-allauth,moreati/django-allauth,dincamihai/django-allauth,lmorchard/django-allauth,fabiocerqueira/django-allauth,italomaia/django-allauth,JshWright/django-allauth,lukeburden/django-allauth,payamsm/django-allauth,socialsweethearts/django-allauth,pennersr/django-allauth,AltSchool/django-allauth,joshowen/django-allauth,lukeburden/django-allauth,bopo/django-allauth,joshowen/django-allauth,AltSchool/django-allauth,aexeagmbh/django-allauth,carltongibson/django-allauth,sih4sing5hong5/django-allauth,hanasoo/django-allauth,igorgai/django-allauth,jwhitlock/django-allauth,payamsm/django-allauth,zhangziang/django-allauth,fuzzpedal/django-allauth,tigeraniya/django-allauth,pennersr/django-allauth,SakuradaJun/django-allauth,nimbis/django-allauth,jwhitlock/django-allauth,patricio-astudillo/django-allauth,wli/django-allauth,avsd/django-allauth,bittner/django-allauth,willharris/django-allauth,igorgai/django-allauth,bjorand/django-allauth
|
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
+ from django import template
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
- for engine in settings.TEMPLATES:
+ for engine in template.engines.templates.values():
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
Fix for checking the context processors on Django 1.8
|
## Code Before:
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
for engine in settings.TEMPLATES:
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
## Instruction:
Fix for checking the context processors on Django 1.8
## Code After:
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django import template
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
for engine in template.engines.templates.values():
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
+ from django import template
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
def check_context_processors():
allauth_ctx = 'allauth.socialaccount.context_processors.socialaccount'
ctx_present = False
if django.VERSION < (1, 8,):
if allauth_ctx in settings.TEMPLATE_CONTEXT_PROCESSORS:
ctx_present = True
else:
- for engine in settings.TEMPLATES:
+ for engine in template.engines.templates.values():
if allauth_ctx in engine.get('OPTIONS', {})\
.get('context_processors', []):
ctx_present = True
break
if not ctx_present:
excmsg = ("socialaccount context processor "
"not found in settings.TEMPLATE_CONTEXT_PROCESSORS."
"See settings.py instructions here: "
"https://github.com/pennersr/django-allauth#installation")
raise ImproperlyConfigured(excmsg)
if SOCIALACCOUNT_ENABLED:
check_context_processors()
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
242f27f943a107bf7dd2a472f08a71a8382f6467
|
mopidy/__init__.py
|
mopidy/__init__.py
|
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
        raise Exception('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
Use subprocess instead of os.popen
|
Use subprocess instead of os.popen
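A hedged sketch of the pattern (not the commit's exact code): communicate()
is used below instead of wait() followed by stdout.read(), since it also
avoids blocking if the child fills its pipe buffer.
```python
from subprocess import PIPE, Popen

def git_describe():
    # Capture the output of "git describe" without shelling out via os.popen.
    process = Popen(['git', 'describe'], stdout=PIPE)
    stdout, _ = process.communicate()
    if process.returncode != 0:
        raise Exception('Execution of "git describe" failed')
    version = stdout.strip()
    if version.startswith(b'v'):
        version = version[1:]
    return version
```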
|
Python
|
apache-2.0
|
ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,kingosticks/mopidy,jcass77/mopidy,jmarsik/mopidy,bacontext/mopidy,mopidy/mopidy,hkariti/mopidy,bencevans/mopidy,ZenithDK/mopidy,dbrgn/mopidy,hkariti/mopidy,SuperStarPL/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jodal/mopidy,vrs01/mopidy,ali/mopidy,diandiankan/mopidy,swak/mopidy,rawdlite/mopidy,hkariti/mopidy,ali/mopidy,bencevans/mopidy,diandiankan/mopidy,liamw9534/mopidy,rawdlite/mopidy,tkem/mopidy,swak/mopidy,glogiotatidis/mopidy,quartz55/mopidy,bacontext/mopidy,bencevans/mopidy,dbrgn/mopidy,mokieyue/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,priestd09/mopidy,pacificIT/mopidy,mopidy/mopidy,jodal/mopidy,swak/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,abarisain/mopidy,woutervanwijk/mopidy,adamcik/mopidy,ZenithDK/mopidy,vrs01/mopidy,tkem/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,jcass77/mopidy,abarisain/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,priestd09/mopidy,swak/mopidy,kingosticks/mopidy,bencevans/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,liamw9534/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,rawdlite/mopidy,quartz55/mopidy,ali/mopidy,rawdlite/mopidy,kingosticks/mopidy,bacontext/mopidy,mokieyue/mopidy,mopidy/mopidy,jmarsik/mopidy,vrs01/mopidy,pacificIT/mopidy,dbrgn/mopidy,jmarsik/mopidy,priestd09/mopidy,quartz55/mopidy,quartz55/mopidy,vrs01/mopidy
|
- import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
+ from subprocess import PIPE, Popen
+
VERSION = (0, 4, 0)
- def is_in_git_repo():
- git_dir = os.path.abspath(os.path.join(
- os.path.dirname(__file__), '../.git'))
- return os.path.exists(git_dir)
-
def get_git_version():
- if not is_in_git_repo():
- return None
- git_version = os.popen('git describe').read().strip()
+ process = Popen(['git', 'describe'], stdout=PIPE)
+ if process.wait() != 0:
+        raise Exception('Execution of "git describe" failed')
+ version = process.stdout.read().strip()
- if git_version.startswith('v'):
+ if version.startswith('v'):
- git_version = git_version[1:]
+ version = version[1:]
- return git_version
+ return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
- if is_in_git_repo():
+ try:
return get_git_version()
- else:
+ except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
Use subprocess instead of os.popen
|
## Code Before:
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
## Instruction:
Use subprocess instead of os.popen
## Code After:
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
        raise Exception('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
- import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
+ from subprocess import PIPE, Popen
+
VERSION = (0, 4, 0)
- def is_in_git_repo():
- git_dir = os.path.abspath(os.path.join(
- os.path.dirname(__file__), '../.git'))
- return os.path.exists(git_dir)
-
def get_git_version():
- if not is_in_git_repo():
- return None
- git_version = os.popen('git describe').read().strip()
+ process = Popen(['git', 'describe'], stdout=PIPE)
+ if process.wait() != 0:
+        raise Exception('Execution of "git describe" failed')
+ version = process.stdout.read().strip()
- if git_version.startswith('v'):
? ----
+ if version.startswith('v'):
- git_version = git_version[1:]
? ---- ----
+ version = version[1:]
- return git_version
? ----
+ return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
- if is_in_git_repo():
+ try:
return get_git_version()
- else:
+ except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
7daed119551dfc259a0eda0224ac2a6b701c5c14
|
app/main/services/process_request_json.py
|
app/main/services/process_request_json.py
|
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
def process_values_for_matching(request_json, key):
values = request_json[key]
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
filter_fields = [field for field in request_json if field in FILTER_FIELDS]
for field in filter_fields:
request_json["filter_" + field] = \
process_values_for_matching(request_json, field)
if field not in TEXT_FIELDS:
del request_json[field]
return request_json
|
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
FILTER_FIELDS_SET = set(FILTER_FIELDS)
TEXT_FIELDS_SET = set(TEXT_FIELDS)
def process_values_for_matching(values):
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
index_json = {}
for field in request_json:
if field in FILTER_FIELDS_SET:
index_json["filter_" + field] = process_values_for_matching(
request_json[field]
)
if field in TEXT_FIELDS_SET:
index_json[field] = request_json[field]
return index_json
|
Drop any unknown request fields when converting into index document
|
Drop any unknown request fields when converting into index document
Previously, convert_request_json_into_index_json relied on the request
being sent through the dmutils.apiclient, which dropped any fields that
aren't supposed to be indexed. This means that dmutils contains a copy
of the filter and text fields lists.
Instead, the new conversion function only keeps text and filter fields from
the request, so it can accept any service document for indexing.
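A toy illustration of the new behaviour (hedged: the field names below are
hypothetical; the real lists come from query_builder.FILTER_FIELDS and
TEXT_FIELDS):
```python
FILTER_FIELDS_SET = {'lot'}        # hypothetical filter field
TEXT_FIELDS_SET = {'serviceName'}  # hypothetical text field

def convert(request_json):
    # Build a fresh index document instead of mutating the request in place.
    index_json = {}
    for field in request_json:
        if field in FILTER_FIELDS_SET:
            index_json['filter_' + field] = request_json[field].lower()
        if field in TEXT_FIELDS_SET:
            index_json[field] = request_json[field]
    return index_json

doc = {'lot': 'SaaS', 'serviceName': 'Email', 'unknownField': 'dropped'}
print(convert(doc))  # -> {'filter_lot': 'saas', 'serviceName': 'Email'}
```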
|
Python
|
mit
|
RichardKnop/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api
|
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
+ FILTER_FIELDS_SET = set(FILTER_FIELDS)
+ TEXT_FIELDS_SET = set(TEXT_FIELDS)
- def process_values_for_matching(request_json, key):
- values = request_json[key]
+ def process_values_for_matching(values):
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
- filter_fields = [field for field in request_json if field in FILTER_FIELDS]
+ index_json = {}
+ for field in request_json:
- for field in filter_fields:
- request_json["filter_" + field] = \
- process_values_for_matching(request_json, field)
- if field not in TEXT_FIELDS:
+ if field in FILTER_FIELDS_SET:
+ index_json["filter_" + field] = process_values_for_matching(
- del request_json[field]
+ request_json[field]
+ )
+ if field in TEXT_FIELDS_SET:
+ index_json[field] = request_json[field]
- return request_json
+ return index_json
|
Drop any unknown request fields when converting into index document
|
## Code Before:
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
def process_values_for_matching(request_json, key):
values = request_json[key]
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
filter_fields = [field for field in request_json if field in FILTER_FIELDS]
for field in filter_fields:
request_json["filter_" + field] = \
process_values_for_matching(request_json, field)
if field not in TEXT_FIELDS:
del request_json[field]
return request_json
## Instruction:
Drop any unknown request fields when converting into index document
## Code After:
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
FILTER_FIELDS_SET = set(FILTER_FIELDS)
TEXT_FIELDS_SET = set(TEXT_FIELDS)
def process_values_for_matching(values):
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
index_json = {}
for field in request_json:
if field in FILTER_FIELDS_SET:
index_json["filter_" + field] = process_values_for_matching(
request_json[field]
)
if field in TEXT_FIELDS_SET:
index_json[field] = request_json[field]
return index_json
|
import six
from .query_builder import FILTER_FIELDS, TEXT_FIELDS
from .conversions import strip_and_lowercase
+ FILTER_FIELDS_SET = set(FILTER_FIELDS)
+ TEXT_FIELDS_SET = set(TEXT_FIELDS)
- def process_values_for_matching(request_json, key):
- values = request_json[key]
+ def process_values_for_matching(values):
if isinstance(values, list):
return [strip_and_lowercase(value) for value in values]
elif isinstance(values, six.string_types):
return strip_and_lowercase(values)
return values
def convert_request_json_into_index_json(request_json):
- filter_fields = [field for field in request_json if field in FILTER_FIELDS]
+ index_json = {}
+ for field in request_json:
- for field in filter_fields:
- request_json["filter_" + field] = \
- process_values_for_matching(request_json, field)
- if field not in TEXT_FIELDS:
? ---- ^^
+ if field in FILTER_FIELDS_SET:
? +++ ^ ++++
+ index_json["filter_" + field] = process_values_for_matching(
- del request_json[field]
? ^^^
+ request_json[field]
? ^^^
+ )
+ if field in TEXT_FIELDS_SET:
+ index_json[field] = request_json[field]
- return request_json
? ^ ^^^^^
+ return index_json
? ^^^ ^
|
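Note: the change above is easy to check in isolation. A minimal sketch, assuming hypothetical field lists ('lot' as a filter field, 'serviceName' as a text field; the real FILTER_FIELDS/TEXT_FIELDS are not shown in this record):
# Sketch of the post-change behaviour with stand-in field lists.
FILTER_FIELDS_SET = {'lot'}          # hypothetical filter field
TEXT_FIELDS_SET = {'serviceName'}    # hypothetical text field
def convert_request_json_into_index_json(request_json):
    index_json = {}
    for field in request_json:
        if field in FILTER_FIELDS_SET:
            # the real code also normalises values via process_values_for_matching
            index_json['filter_' + field] = request_json[field]
        if field in TEXT_FIELDS_SET:
            index_json[field] = request_json[field]
    return index_json
doc = {'lot': 'SaaS', 'serviceName': 'Email hosting', 'internalId': 42}
print(convert_request_json_into_index_json(doc))
# {'filter_lot': 'SaaS', 'serviceName': 'Email hosting'}
# The unknown 'internalId' is dropped instead of leaking into the index.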
fc39c6afa49a312413468dfdffcc2de94bb7d78e
|
tests/test_runner.py
|
tests/test_runner.py
|
import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
import unittest
from mo.runner import Task, Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
class TestTask(unittest.TestCase):
def test_variables(self):
t = Task('name', {'description': '', 'command': '{{ v }}'},
{'v': 'variable'})
self.assertEqual(t.commands[0], 'variable')
|
Add some more tests for tasks
|
Add some more tests for tasks
|
Python
|
mit
|
thomasleese/mo
|
import unittest
- from mo.runner import Variable
+ from mo.runner import Task, Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
+
+ class TestTask(unittest.TestCase):
+ def test_variables(self):
+ t = Task('name', {'description': '', 'command': '{{ v }}'},
+ {'v': 'variable'})
+ self.assertEqual(t.commands[0], 'variable')
+
|
Add some more tests for tasks
|
## Code Before:
import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
## Instruction:
Add some more tests for tasks
## Code After:
import unittest
from mo.runner import Task, Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
class TestTask(unittest.TestCase):
def test_variables(self):
t = Task('name', {'description': '', 'command': '{{ v }}'},
{'v': 'variable'})
self.assertEqual(t.commands[0], 'variable')
|
import unittest
- from mo.runner import Variable
+ from mo.runner import Task, Variable
? ++++++
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
+
+
+ class TestTask(unittest.TestCase):
+ def test_variables(self):
+ t = Task('name', {'description': '', 'command': '{{ v }}'},
+ {'v': 'variable'})
+ self.assertEqual(t.commands[0], 'variable')
|
ead5a941efd8b8a41b81f679ad3e6c98e2248409
|
dipy/io/tests/test_dicomreaders.py
|
dipy/io/tests/test_dicomreaders.py
|
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
yield assert_equal(aff.shape, (4,4))
yield assert_equal(bs.shape, (2,))
yield assert_equal(gs.shape, (2,3))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
|
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
EXPECTED_PARAMS,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
yield assert_array_almost_equal(aff, EXPECTED_AFFINE)
yield assert_array_almost_equal(bs, (0, EXPECTED_PARAMS[0]))
yield assert_array_almost_equal(gs,
(np.zeros((3,)) + np.nan,
EXPECTED_PARAMS[1]))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
|
TEST - added more explicit tests for directory read
|
TEST - added more explicit tests for directory read
|
Python
|
bsd-3-clause
|
FrancoisRheaultUS/dipy,sinkpoint/dipy,StongeEtienne/dipy,samuelstjean/dipy,rfdougherty/dipy,demianw/dipy,samuelstjean/dipy,JohnGriffiths/dipy,mdesco/dipy,rfdougherty/dipy,maurozucchelli/dipy,demianw/dipy,jyeatman/dipy,oesteban/dipy,beni55/dipy,StongeEtienne/dipy,mdesco/dipy,matthieudumont/dipy,FrancoisRheaultUS/dipy,jyeatman/dipy,sinkpoint/dipy,villalonreina/dipy,maurozucchelli/dipy,samuelstjean/dipy,Messaoud-Boudjada/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,matthieudumont/dipy,nilgoyyou/dipy,nilgoyyou/dipy,JohnGriffiths/dipy,oesteban/dipy,villalonreina/dipy,beni55/dipy
|
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
+ EXPECTED_PARAMS,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
- yield assert_equal(aff.shape, (4,4))
- yield assert_equal(bs.shape, (2,))
- yield assert_equal(gs.shape, (2,3))
+ yield assert_array_almost_equal(aff, EXPECTED_AFFINE)
+ yield assert_array_almost_equal(bs, (0, EXPECTED_PARAMS[0]))
+ yield assert_array_almost_equal(gs,
+ (np.zeros((3,)) + np.nan,
+ EXPECTED_PARAMS[1]))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
+
|
TEST - added more explicit tests for directory read
|
## Code Before:
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
yield assert_equal(aff.shape, (4,4))
yield assert_equal(bs.shape, (2,))
yield assert_equal(gs.shape, (2,3))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
## Instruction:
TEST - added more explicit tests for directory read
## Code After:
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
EXPECTED_PARAMS,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
yield assert_array_almost_equal(aff, EXPECTED_AFFINE)
yield assert_array_almost_equal(bs, (0, EXPECTED_PARAMS[0]))
yield assert_array_almost_equal(gs,
(np.zeros((3,)) + np.nan,
EXPECTED_PARAMS[1]))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
|
import numpy as np
from .. import dicomreaders as didr
from .test_dicomwrappers import (EXPECTED_AFFINE,
+ EXPECTED_PARAMS,
DATA)
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric, IO_DATA_PATH
@parametric
def test_read_dwi():
img = didr.mosaic_to_nii(DATA)
arr = img.get_data()
yield assert_equal(arr.shape, (128,128,48))
yield assert_array_almost_equal(img.get_affine(), EXPECTED_AFFINE)
@parametric
def test_read_dwis():
data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, '*.dcm.gz')
yield assert_equal(data.ndim, 4)
- yield assert_equal(aff.shape, (4,4))
- yield assert_equal(bs.shape, (2,))
- yield assert_equal(gs.shape, (2,3))
+ yield assert_array_almost_equal(aff, EXPECTED_AFFINE)
+ yield assert_array_almost_equal(bs, (0, EXPECTED_PARAMS[0]))
+ yield assert_array_almost_equal(gs,
+ (np.zeros((3,)) + np.nan,
+ EXPECTED_PARAMS[1]))
yield assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable')
+
|
ed3906b295669b1c0e38d88a7eb19cdde324042b
|
pybuild/packages/libzmq.py
|
pybuild/packages/libzmq.py
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
self.system(
f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
)
|
Fix issue for building PyZMQ
|
Fix issue for building PyZMQ
|
Python
|
apache-2.0
|
qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
+ self.system(
+ f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
+ )
|
Fix issue for building PyZMQ
|
## Code Before:
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
## Instruction:
Fix issue for building PyZMQ
## Code After:
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
self.system(
f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
)
|
from ..source import GitSource
from ..package import Package
from ..patch import LocalPatch
from ..util import target_arch
class LibZMQ(Package):
source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5')
patches = [
LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'),
#LocalPatch('0001-Disable-stderr-in-blas_server'),
]
skip_uploading = True
re_configure = True
def prepare(self):
pass
def build(self):
import os
self.system(f'./autogen.sh')
self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}')
self.system(f'make install')
#self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
+ self.system(
+ f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi'
+ )
|
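Note: the added shell one-liner renames an existing libzmq.so out of the way after install. A rough Python equivalent of the same guard, with a placeholder destdir:
import os
destdir = '/tmp/qpy-build'  # placeholder; the real value comes from self.destdir()
so_path = os.path.join(destdir, 'lib', 'libzmq.so')
if os.path.exists(so_path):
    # mirrors the shell guard: if [ -e ... ]; then mv ... .old; fi
    os.rename(so_path, so_path + '.old')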
760ce74fca8fa9a640167eabb4af83e31e902500
|
openedx/core/djangoapps/api_admin/utils.py
|
openedx/core/djangoapps/api_admin/utils.py
|
""" Course Discovery API Service. """
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
CLIENT_NAME = 'course-discovery'
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return EdxRestApiClient(
course_discovery_client.url,
jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
)
|
""" Course Discovery API Service. """
import datetime
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
import jwt
from openedx.core.djangoapps.theming import helpers
from provider.oauth2.models import Client
from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
def get_id_token(user):
"""
Return a JWT for `user`, suitable for use with the course discovery service.
Arguments:
user (User): User for whom to generate the JWT.
Returns:
str: The JWT.
"""
try:
# Service users may not have user profiles.
full_name = UserProfile.objects.get(user=user).name
except UserProfile.DoesNotExist:
full_name = None
now = datetime.datetime.utcnow()
expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
payload = {
'preferred_username': user.username,
'name': full_name,
'email': user.email,
'administrator': user.is_staff,
'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
'exp': now + datetime.timedelta(seconds=expires_in),
'iat': now,
'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
'sub': anonymous_id_for_user(user, None),
}
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
|
Use correct JWT audience when connecting to course discovery.
|
Use correct JWT audience when connecting to course discovery.
|
Python
|
agpl-3.0
|
cecep-edu/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,waheedahmed/edx-platform,proversity-org/edx-platform,pabloborrego93/edx-platform,mbareta/edx-platform-ft,ESOedX/edx-platform,longmen21/edx-platform,pepeportela/edx-platform,chrisndodge/edx-platform,procangroup/edx-platform,mitocw/edx-platform,Stanford-Online/edx-platform,proversity-org/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,JioEducation/edx-platform,stvstnfrd/edx-platform,gsehub/edx-platform,mitocw/edx-platform,waheedahmed/edx-platform,prarthitm/edxplatform,Livit/Livit.Learn.EdX,amir-qayyum-khan/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,jzoldak/edx-platform,jjmiranda/edx-platform,edx-solutions/edx-platform,cecep-edu/edx-platform,longmen21/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,ESOedX/edx-platform,TeachAtTUM/edx-platform,cecep-edu/edx-platform,longmen21/edx-platform,Stanford-Online/edx-platform,proversity-org/edx-platform,EDUlib/edx-platform,synergeticsedx/deployment-wipro,a-parhom/edx-platform,BehavioralInsightsTeam/edx-platform,mbareta/edx-platform-ft,hastexo/edx-platform,jolyonb/edx-platform,Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX,teltek/edx-platform,kmoocdev2/edx-platform,eduNEXT/edunext-platform,jjmiranda/edx-platform,edx/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,teltek/edx-platform,waheedahmed/edx-platform,ampax/edx-platform,EDUlib/edx-platform,deepsrijit1105/edx-platform,tanmaykm/edx-platform,Lektorium-LLC/edx-platform,tanmaykm/edx-platform,eduNEXT/edx-platform,philanthropy-u/edx-platform,cpennington/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,cecep-edu/edx-platform,cpennington/edx-platform,CredoReference/edx-platform,romain-li/edx-platform,romain-li/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,lduarte1991/edx-platform,waheedahmed/edx-platform,appsembler/edx-platform,miptliot/edx-platform,appsembler/edx-platform,itsjeyd/edx-platform,jzoldak/edx-platform,jjmiranda/edx-platform,pepeportela/edx-platform,ampax/edx-platform,gsehub/edx-platform,pabloborrego93/edx-platform,hastexo/edx-platform,tanmaykm/edx-platform,angelapper/edx-platform,gsehub/edx-platform,teltek/edx-platform,EDUlib/edx-platform,CredoReference/edx-platform,naresh21/synergetics-edx-platform,tanmaykm/edx-platform,arbrandes/edx-platform,hastexo/edx-platform,raccoongang/edx-platform,itsjeyd/edx-platform,raccoongang/edx-platform,Lektorium-LLC/edx-platform,synergeticsedx/deployment-wipro,cecep-edu/edx-platform,shabab12/edx-platform,fintech-circle/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,waheedahmed/edx-platform,mitocw/edx-platform,louyihua/edx-platform,prarthitm/edxplatform,romain-li/edx-platform,caesar2164/edx-platform,msegado/edx-platform,caesar2164/edx-platform,miptliot/edx-platform,JioEducation/edx-platform,miptliot/edx-platform,shabab12/edx-platform,hastexo/edx-platform,itsjeyd/edx-platform,stvstnfrd/edx-platform,a-parhom/edx-platform,BehavioralInsightsTeam/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,prarthitm/edxplatform,edx-solutions/edx-platform,jzoldak/edx-platform
|
""" Course Discovery API Service. """
+ import datetime
+
from django.conf import settings
+ from edx_rest_api_client.client import EdxRestApiClient
+ import jwt
- from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
- from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
+ from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
+
+
+ def get_id_token(user):
+ """
+ Return a JWT for `user`, suitable for use with the course discovery service.
+
+ Arguments:
+ user (User): User for whom to generate the JWT.
+
+ Returns:
+ str: The JWT.
+ """
+ try:
+ # Service users may not have user profiles.
+ full_name = UserProfile.objects.get(user=user).name
+ except UserProfile.DoesNotExist:
+ full_name = None
+
+ now = datetime.datetime.utcnow()
+ expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
+
+ payload = {
+ 'preferred_username': user.username,
+ 'name': full_name,
+ 'email': user.email,
+ 'administrator': user.is_staff,
+ 'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
+ 'exp': now + datetime.timedelta(seconds=expires_in),
+ 'iat': now,
+ 'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
+ 'sub': anonymous_id_for_user(user, None),
+ }
+ secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
+
+ return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
+ return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
- secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
- return EdxRestApiClient(
- course_discovery_client.url,
- jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
- )
|
Use correct JWT audience when connecting to course discovery.
|
## Code Before:
""" Course Discovery API Service. """
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
CLIENT_NAME = 'course-discovery'
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return EdxRestApiClient(
course_discovery_client.url,
jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
)
## Instruction:
Use correct JWT audience when connecting to course discovery.
## Code After:
""" Course Discovery API Service. """
import datetime
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
import jwt
from openedx.core.djangoapps.theming import helpers
from provider.oauth2.models import Client
from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
def get_id_token(user):
"""
Return a JWT for `user`, suitable for use with the course discovery service.
Arguments:
user (User): User for whom to generate the JWT.
Returns:
str: The JWT.
"""
try:
# Service users may not have user profiles.
full_name = UserProfile.objects.get(user=user).name
except UserProfile.DoesNotExist:
full_name = None
now = datetime.datetime.utcnow()
expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
payload = {
'preferred_username': user.username,
'name': full_name,
'email': user.email,
'administrator': user.is_staff,
'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
'exp': now + datetime.timedelta(seconds=expires_in),
'iat': now,
'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
'sub': anonymous_id_for_user(user, None),
}
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
|
""" Course Discovery API Service. """
+ import datetime
+
from django.conf import settings
+ from edx_rest_api_client.client import EdxRestApiClient
+ import jwt
- from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
- from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
+ from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
+
+
+ def get_id_token(user):
+ """
+ Return a JWT for `user`, suitable for use with the course discovery service.
+
+ Arguments:
+ user (User): User for whom to generate the JWT.
+
+ Returns:
+ str: The JWT.
+ """
+ try:
+ # Service users may not have user profiles.
+ full_name = UserProfile.objects.get(user=user).name
+ except UserProfile.DoesNotExist:
+ full_name = None
+
+ now = datetime.datetime.utcnow()
+ expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
+
+ payload = {
+ 'preferred_username': user.username,
+ 'name': full_name,
+ 'email': user.email,
+ 'administrator': user.is_staff,
+ 'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
+ 'exp': now + datetime.timedelta(seconds=expires_in),
+ 'iat': now,
+ 'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
+ 'sub': anonymous_id_for_user(user, None),
+ }
+ secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
+
+ return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
+ return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
- secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
- return EdxRestApiClient(
- course_discovery_client.url,
- jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
- )
|
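Note: the point of the rewrite is that the token's aud claim now comes from the JWT_AUDIENCE setting. A minimal PyJWT round trip showing audience verification; SECRET and AUDIENCE are placeholder values, not the platform's real settings:
import datetime
import jwt  # PyJWT
SECRET = 'not-the-real-secret'   # placeholder
AUDIENCE = 'course-discovery'    # placeholder audience value
now = datetime.datetime.utcnow()
token = jwt.encode(
    {'aud': AUDIENCE, 'iat': now,
     'exp': now + datetime.timedelta(seconds=30)},
    SECRET,
)
# decode() verifies the audience; a mismatch raises jwt.InvalidAudienceError
claims = jwt.decode(token, SECRET, algorithms=['HS256'], audience=AUDIENCE)
print(claims['aud'])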
3f4844c61c4bb8d2e578727ed220de07b0385a74
|
speaker/appstore/review.py
|
speaker/appstore/review.py
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
Fix appstore json parsing process.
|
Fix appstore json parsing process.
|
Python
|
mit
|
oldsup/clerk
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
+ if region == 'sg':
- entries = feed.get('entry')
+ entries = feed.get('entry')
+ else:
+ entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
Fix appstore json parsing process.
|
## Code Before:
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
## Instruction:
Fix appstore json parsing process.
## Code After:
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
+ if region == 'sg':
- entries = feed.get('entry')
+ entries = feed.get('entry')
? ++++
+ else:
+ entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
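Note: an alternative to hard-coding the 'sg' branch, sketched only as an illustration, is to probe the response shape, since after the outer .get('feed') some regions apparently wrap the entries in a second 'feed' level:
def extract_entries(feed):
    """Illustrative only: handle both flat and doubly wrapped feeds."""
    inner = feed.get('feed')
    if isinstance(inner, dict):
        return inner.get('entry')   # doubly wrapped response
    return feed.get('entry')        # flat response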
f7d792d684e6c74f4a3e508bc29bbe2bacc458f0
|
cms/templatetags/cms.py
|
cms/templatetags/cms.py
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
Fix order of date parts
|
Fix order of date parts
|
Python
|
apache-2.0
|
willingc/pythondotorg,lebronhkh/pythondotorg,malemburg/pythondotorg,SujaySKumar/pythondotorg,python/pythondotorg,python/pythondotorg,willingc/pythondotorg,demvher/pythondotorg,SujaySKumar/pythondotorg,lsk112233/Clone-test-repo,Mariatta/pythondotorg,ahua/pythondotorg,malemburg/pythondotorg,lepture/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,python/pythondotorg,demvher/pythondotorg,SujaySKumar/pythondotorg,manhhomienbienthuy/pythondotorg,lsk112233/Clone-test-repo,malemburg/pythondotorg,malemburg/pythondotorg,manhhomienbienthuy/pythondotorg,demvher/pythondotorg,ahua/pythondotorg,manhhomienbienthuy/pythondotorg,demvher/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,Mariatta/pythondotorg,SujaySKumar/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,fe11x/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,berkerpeksag/pythondotorg,fe11x/pythondotorg,proevo/pythondotorg,fe11x/pythondotorg,berkerpeksag/pythondotorg,berkerpeksag/pythondotorg,python/pythondotorg,SujaySKumar/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,willingc/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,fe11x/pythondotorg,ahua/pythondotorg,ahua/pythondotorg,ahua/pythondotorg,demvher/pythondotorg,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,fe11x/pythondotorg,willingc/pythondotorg,proevo/pythondotorg
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
- date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
+ date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
- month_day=format(date, 'm-d'),
+ month=format(date, 'm'),
+ day=format(date, 'd'),
- year=format(date, 'Y')
+ year=format(date, 'Y'),
))
|
Fix order of date parts
|
## Code Before:
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
## Instruction:
Fix order of date parts
## Code After:
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
- date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
? ----------- -
+ date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
? + +++++++++++++
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
- month_day=format(date, 'm-d'),
? ---- --
+ month=format(date, 'm'),
+ day=format(date, 'd'),
- year=format(date, 'Y')
+ year=format(date, 'Y'),
? +
))
|
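Note: a plain-datetime illustration of the corrected markup (strftime stands in for django.utils.dateformat.format, which produces the same digits here). With the say-no-more span hidden, the visible text now shrinks to a correctly ordered MM-DD:
import datetime
d = datetime.date(2016, 3, 5)
html = ('<time datetime="{ts}"><span class="say-no-more">{year}-</span>'
        '{month}-{day}</time>').format(
    ts=d.isoformat(), year=d.strftime('%Y'),
    month=d.strftime('%m'), day=d.strftime('%d'))
print(html)
# <time datetime="2016-03-05"><span class="say-no-more">2016-</span>03-05</time>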
a5130e32bffa1dbc4d83f349fc3653b690154d71
|
vumi/workers/vas2nets/workers.py
|
vumi/workers/vas2nets/workers.py
|
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
'transport_keyword': data['transport_keyword'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
Add keyword to echo worker.
|
Add keyword to echo worker.
|
Python
|
bsd-3-clause
|
TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi
|
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
+ 'transport_keyword': data['transport_keyword'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
Add keyword to echo worker.
|
## Code Before:
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
## Instruction:
Add keyword to echo worker.
## Code After:
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
'transport_keyword': data['transport_keyword'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, Deferred
from vumi.message import Message
from vumi.service import Worker
class EchoWorker(Worker):
@inlineCallbacks
def startWorker(self):
"""called by the Worker class when the AMQP connections been established"""
self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config)
self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config,
self.handle_inbound_message)
def handle_inbound_message(self, message):
log.msg("Received: %s" % (message.payload,))
"""Reply to the message with the same content"""
data = message.payload
reply = {
'to_msisdn': data['from_msisdn'],
'from_msisdn': data['to_msisdn'],
'message': data['message'],
'id': data['transport_message_id'],
'transport_network_id': data['transport_network_id'],
+ 'transport_keyword': data['transport_keyword'],
}
return self.publisher.publish_message(Message(**reply))
def stopWorker(self):
"""shutdown"""
pass
|
4ca1aeb4b0fd3e8d3406d5b5152eb382e32abc1f
|
app/main/views.py
|
app/main/views.py
|
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
|
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
page = page.strip('/')
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
|
Allow trailing slash in URL
|
Allow trailing slash in URL
|
Python
|
mit
|
saltastro/salt-data-quality-site,saltastro/salt-data-quality-site,saltastro/salt-data-quality-site,saltastro/salt-data-quality-site
|
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
+ page = page.strip('/')
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
|
Allow trailing slash in URL
|
## Code Before:
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
## Instruction:
Allow trailing slash in URL
## Code After:
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
page = page.strip('/')
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
|
import importlib
from flask import render_template
from werkzeug.exceptions import NotFound
from . import main
DATA_QUALITY_ROUTE = '/data-quality/'
@main.route('/')
def index():
return render_template('index.html')
@main.route('/data-quality/<path:page>')
def data_quality_page(page):
"""Serve a data quality page.
page must be a directory path relative to /app/main/pages, and the corresponding directory must be a package.
Params:
-------
page: str
Path of the directory containing the page content.
"""
+ page = page.strip('/')
page = page.replace('/', '.') # turn directory path into package name
try:
dq = importlib.import_module('app.main.pages.' + page, __package__)
except ImportError:
raise NotFound
return render_template('data_quality/data_quality_page.html', title=dq.title(), content=dq.content())
|
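A standalone sketch of the normalization step this change introduces; the page paths below are invented for illustration:
def normalize_page(page):
    # Leading and trailing slashes are dropped before the path becomes a package name.
    page = page.strip('/')
    return page.replace('/', '.')
assert normalize_page('seeing/fwhm/') == 'seeing.fwhm'
assert normalize_page('/seeing/fwhm') == 'seeing.fwhm'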
a509cd74d1e49dd9f9585b8e4c43e88aaf2bc19d
|
tests/stonemason/service/tileserver/test_tileserver.py
|
tests/stonemason/service/tileserver/test_tileserver.py
|
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_MODE'] = 'development'
app = AppBuilder().build(config='settings.py')
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
Update tests for the test app
|
TEST: Update tests for the test app
|
Python
|
mit
|
Kotaimen/stonemason,Kotaimen/stonemason
|
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
- os.environ['EXAMPLE_APP_ENV'] = 'dev'
+ os.environ['EXAMPLE_APP_MODE'] = 'development'
- app = AppBuilder().build()
+ app = AppBuilder().build(config='settings.py')
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
Update tests for the test app
|
## Code Before:
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
## Instruction:
Update tests for the test app
## Code After:
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_MODE'] = 'development'
app = AppBuilder().build(config='settings.py')
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
- os.environ['EXAMPLE_APP_ENV'] = 'dev'
? --
+ os.environ['EXAMPLE_APP_MODE'] = 'development'
? +++ ++++++++
- app = AppBuilder().build()
+ app = AppBuilder().build(config='settings.py')
? ++++++++++++++++++++
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
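The same environment-plus-config pattern can be sketched against a bare Flask app; Flask and the builder shape below are stand-ins, not stonemason's actual AppBuilder:
import os
from flask import Flask
def build(config=None):
    # config is accepted only to mirror the builder's signature in this sketch.
    app = Flask(__name__)
    app.config['DEBUG'] = os.environ.get('EXAMPLE_APP_MODE') == 'development'
    @app.route('/')
    def index():
        return 'Hello World!'
    return app
os.environ['EXAMPLE_APP_MODE'] = 'development'
client = build(config='settings.py').test_client()
assert client.get('/').data == b'Hello World!'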
86b889049ef1ee1c896e4ab44185fc54ef87a2c0
|
IPython/consoleapp.py
|
IPython/consoleapp.py
|
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
warn("The `IPython.consoleapp` package has been deprecated. "
"You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2)
from jupyter_client.consoleapp import *
|
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
"You should import from jupyter_client.consoleapp instead.", stacklevel=2)
from jupyter_client.consoleapp import *
|
Remove Deprecation Warning, add since when things were deprecated.
|
Remove Deprecation Warning, add since when things were deprecated.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
- warn("The `IPython.consoleapp` package has been deprecated. "
+ warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
- "You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2)
+ "You should import from jupyter_client.consoleapp instead.", stacklevel=2)
from jupyter_client.consoleapp import *
|
Remove Deprecation Warning, add since when things were deprecated.
|
## Code Before:
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
warn("The `IPython.consoleapp` package has been deprecated. "
"You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2)
from jupyter_client.consoleapp import *
## Instruction:
Remove Deprecation Warning, add since when things were deprecated.
## Code After:
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
"You should import from jupyter_client.consoleapp instead.", stacklevel=2)
from jupyter_client.consoleapp import *
|
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
- warn("The `IPython.consoleapp` package has been deprecated. "
? ^
+ warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
? ++++++++++++++++ ^^
- "You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2)
? --------------------
+ "You should import from jupyter_client.consoleapp instead.", stacklevel=2)
from jupyter_client.consoleapp import *
|
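As a reusable sketch, the shim pattern reduces to warn-then-reexport; the names here are placeholders, and the format string includes the space that the record's concatenated literals miss after "4.0.":
from warnings import warn
def warn_moved(old, new, since):
    # stacklevel=2 attributes the warning to the module doing the import.
    warn('The `%s` package has been deprecated since %s. '
         'You should import from %s instead.' % (old, since, new),
         stacklevel=2)
warn_moved('IPython.consoleapp', 'jupyter_client.consoleapp', 'IPython 4.0')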
368e2d6407cb021d80fe3679c65737581c3cc221
|
bliski_publikator/institutions/serializers.py
|
bliski_publikator/institutions/serializers.py
|
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
'region': {'view_name': 'jednostkaadministracyjna-detail'}
}
|
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
'region': {'view_name': 'jednostkaadministracyjna-detail'},
'user': {'read_only': True}
}
|
Make user field read-only in InstitutionSerializer
|
Make user field read-only in InstitutionSerializer
|
Python
|
mit
|
watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator,watchdogpolska/bliski_publikator
|
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
- 'region': {'view_name': 'jednostkaadministracyjna-detail'}
+ 'region': {'view_name': 'jednostkaadministracyjna-detail'},
+ 'user': {'read_only': True}
}
|
Make user field read-only in InstitutionSerializer
|
## Code Before:
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
'region': {'view_name': 'jednostkaadministracyjna-detail'}
}
## Instruction:
Make user field read-only in InstitutionSerializer
## Code After:
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
'region': {'view_name': 'jednostkaadministracyjna-detail'},
'user': {'read_only': True}
}
|
from rest_framework import serializers
from .models import Institution
class InstitutionSerializer(serializers.HyperlinkedModelSerializer):
on_site = serializers.CharField(source='get_absolute_url', read_only=True)
class Meta:
model = Institution
fields = ('on_site',
'url',
'name',
'slug',
'user',
'email',
'region',
'regon',
'krs',
'monitorings')
extra_kwargs = {
- 'region': {'view_name': 'jednostkaadministracyjna-detail'}
+ 'region': {'view_name': 'jednostkaadministracyjna-detail'},
? +
+ 'user': {'read_only': True}
}
|
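In DRF, read_only=True means a field is rendered on output but ignored on input. A library-free sketch of that input-side behavior, with 'user' and 'on_site' taken from the serializer above and the payload invented:
READ_ONLY_FIELDS = {'user', 'on_site'}
def incoming(payload):
    # Strip read-only keys so clients cannot set them; the server fills them in.
    return {k: v for k, v in payload.items() if k not in READ_ONLY_FIELDS}
assert incoming({'name': 'Urzad Gminy', 'user': 7}) == {'name': 'Urzad Gminy'}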
efd1c945cafda82e48077e75e3231cac95d6e077
|
evesrp/util/fields.py
|
evesrp/util/fields.py
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
Update custom ImageField to work with IE
|
Update custom ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.
|
Python
|
bsd-2-clause
|
paxswill/evesrp,paxswill/evesrp,paxswill/evesrp
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
+ from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
+ super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
+ def process(self, formdata, data=unset_value):
+ if formdata:
+ for key in formdata:
+ if key.startswith(self.name):
+ self.data = True
+ break
+ else:
+ self.data = False
+ else:
+ self.data = False
+
|
Update custom ImageField to work with IE
|
## Code Before:
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
## Instruction:
Update custom ImageField to work with IE
## Code After:
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
+ from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
+ super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
+
+ def process(self, formdata, data=unset_value):
+ if formdata:
+ for key in formdata:
+ if key.startswith(self.name):
+ self.data = True
+ break
+ else:
+ self.data = False
+ else:
+ self.data = False
|
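The process() override works because browsers submit an image input's click as coordinate fields: IE sends only name.x and name.y rather than the bare name, so a prefix scan is the portable check. A library-free sketch with invented payloads:
def image_was_clicked(name, formdata):
    # Matches 'name', 'name.x', or 'name.y' among the submitted keys.
    return any(key.startswith(name) for key in formdata)
assert image_was_clicked('approve', {'approve.x': '10', 'approve.y': '4'})
assert not image_was_clicked('approve', {'details_id': '5'})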
674fa7692c71524541d8797a65968e5e605454e7
|
testrail/suite.py
|
testrail/suite.py
|
from datetime import datetime
import api
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
@property
def url(self):
return self._content.get('url')
|
from datetime import datetime
import api
from helper import TestRailError
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
@description.setter
def description(self, value):
if type(value) != str:
raise TestRailError('input must be a string')
self._content['description'] = value
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
@name.setter
def name(self, value):
if type(value) != str:
raise TestRailError('input must be a string')
self._content['name'] = value
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
@project.setter
def project(self, value):
if type(value) != Project:
raise TestRailError('input must be a Project')
self.api.project_with_id(value.id) # verify project is valid
self._content['project_id'] = value.id
@property
def url(self):
return self._content.get('url')
def raw_data(self):
return self._content
|
Add setters for project, name, and description.
|
Add setters for project, name, and description.
|
Python
|
mit
|
travispavek/testrail-python,travispavek/testrail
|
from datetime import datetime
import api
+ from helper import TestRailError
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
+ @description.setter
+ def description(self, value):
+ if type(value) != str:
+ raise TestRailError('input must be a string')
+ self._content['description'] = value
+
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
+ @name.setter
+ def name(self, value):
+ if type(value) != str:
+ raise TestRailError('input must be a string')
+ self._content['name'] = value
+
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
+ @project.setter
+ def project(self, value):
+ if type(value) != Project:
+ raise TestRailError('input must be a Project')
+ self.api.project_with_id(value.id) # verify project is valid
+ self._content['project_id'] = value.id
+
@property
def url(self):
return self._content.get('url')
+ def raw_data(self):
+ return self._content
+
|
Add setters for project, name, and description.
|
## Code Before:
from datetime import datetime
import api
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
@property
def url(self):
return self._content.get('url')
## Instruction:
Add setters for project, name, and description.
## Code After:
from datetime import datetime
import api
from helper import TestRailError
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
@description.setter
def description(self, value):
if type(value) != str:
raise TestRailError('input must be a string')
self._content['description'] = value
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
@name.setter
def name(self, value):
if type(value) != str:
raise TestRailError('input must be a string')
self._content['name'] = value
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
@project.setter
def project(self, value):
if type(value) != Project:
raise TestRailError('input must be a Project')
self.api.project_with_id(value.id) # verify project is valid
self._content['project_id'] = value.id
@property
def url(self):
return self._content.get('url')
def raw_data(self):
return self._content
|
from datetime import datetime
import api
+ from helper import TestRailError
from project import Project
class Suite(object):
def __init__(self, content):
self._content = content
self.api = api.API()
@property
def id(self):
return self._content.get('id')
@property
def completed_on(self):
try:
return datetime.fromtimestamp(
int(self._content.get('completed_on')))
except TypeError:
return None
@property
def description(self):
return self._content.get('description')
+ @description.setter
+ def description(self, value):
+ if type(value) != str:
+ raise TestRailError('input must be a string')
+ self._content['description'] = value
+
@property
def is_baseline(self):
return self._content.get('is_baseline')
@property
def is_completed(self):
return self._content.get('is_completed')
@property
def is_master(self):
return self._content.get('is_master')
@property
def name(self):
return self._content.get('name')
+ @name.setter
+ def name(self, value):
+ if type(value) != str:
+ raise TestRailError('input must be a string')
+ self._content['name'] = value
+
@property
def project(self):
return Project(
self.api.project_with_id(self._content.get('project_id')))
+ @project.setter
+ def project(self, value):
+ if type(value) != Project:
+ raise TestRailError('input must be a Project')
+ self.api.project_with_id(value.id) # verify project is valid
+ self._content['project_id'] = value.id
+
@property
def url(self):
return self._content.get('url')
+
+ def raw_data(self):
+ return self._content
|
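A trimmed, runnable sketch of the validate-then-store setter pattern used above, with ValueError standing in for TestRailError:
class Record(object):
    def __init__(self):
        self._content = {}
    @property
    def name(self):
        return self._content.get('name')
    @name.setter
    def name(self, value):
        if type(value) != str:
            raise ValueError('input must be a string')
        self._content['name'] = value
r = Record()
r.name = 'smoke suite'
assert r._content == {'name': 'smoke suite'}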
693b904a9053fbddc6c93cfab1d6448c4b644d1c
|
scripts/travis_build_dependent_projects.py
|
scripts/travis_build_dependent_projects.py
|
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if "config.json" in build.config.get("config", [""])[0]:
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
Fix Travis dependent build trigger
|
Fix Travis dependent build trigger
|
Python
|
mit
|
dgnorth/drift,dgnorth/drift,dgnorth/drift
|
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
- if "config.json" in build.config.get("config", [""])[0]:
+ if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
Fix Travis dependent build trigger
|
## Code Before:
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if "config.json" in build.config.get("config", [""])[0]:
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
## Instruction:
Fix Travis dependent build trigger
## Code After:
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
- if "config.json" in build.config.get("config", [""])[0]:
+ if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
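The corrected predicate in isolation, exercised against invented build configs:
def is_drift_project(config):
    # Projects opt in by listing 'drift' under the drift_build_trigger key.
    return 'drift' in config.get('drift_build_trigger', [])
assert is_drift_project({'drift_build_trigger': ['drift']})
assert not is_drift_project({'config': ['config.json']})   # shape the old heuristic sniffed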
e28c9da712574618eb28b6ff82631462fee67c16
|
changes/utils/times.py
|
changes/utils/times.py
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
Fix for negative values in duration
|
Fix for negative values in duration
|
Python
|
apache-2.0
|
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
+ abs_value = abs(value)
+
- if value < 3 * ONE_SECOND:
+ if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
- elif value < 5 * ONE_MINUTE:
+ elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
Fix for negative values in duration
|
## Code Before:
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
## Instruction:
Fix for negative values in duration
## Code After:
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
+ abs_value = abs(value)
+
- if value < 3 * ONE_SECOND:
+ if abs_value < 3 * ONE_SECOND:
? ++++
return '%d ms' % (value,)
- elif value < 5 * ONE_MINUTE:
+ elif abs_value < 5 * ONE_MINUTE:
? ++++
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
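Why abs() matters, as a worked case: with value = -4000 the old guard compared -4000 < 3000 and rendered '-4000 ms', while bucketing on abs(-4000) = 4000 lands in the seconds branch and renders '-4 s'. The fixed function, checked directly:
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
def duration(value):
    if not value:
        return '0 s'
    abs_value = abs(value)
    if abs_value < 3 * ONE_SECOND:
        return '%d ms' % (value,)
    elif abs_value < 5 * ONE_MINUTE:
        return '%d s' % (value / ONE_SECOND,)
    return '%d m' % (value / ONE_MINUTE,)
assert duration(-4000) == '-4 s'
assert duration(2000) == '2000 ms'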
e0a34d86837b6d1e1a9d740fbc5f0b8e2a2ee4b1
|
Lib/email/__init__.py
|
Lib/email/__init__.py
|
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
Fix __all__ to the current list of exported modules (must pass the tests in test_email.py).
|
Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py).
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
- 'Image',
'Iterators',
+ 'MIMEAudio',
'MIMEBase',
+ 'MIMEImage',
+ 'MIMEMessage',
+ 'MIMEText',
'Message',
- 'MessageRFC822',
'Parser',
- 'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
Fix __all__ to the current list of exported modules (must pass the tests in test_email.py).
|
## Code Before:
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
## Instruction:
Fix __all__ to the current list of exported modules (must pass the tests in test_email.py).
## Code After:
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
- 'Image',
'Iterators',
+ 'MIMEAudio',
'MIMEBase',
+ 'MIMEImage',
+ 'MIMEMessage',
+ 'MIMEText',
'Message',
- 'MessageRFC822',
'Parser',
- 'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
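What such a test effectively enforces is that every name in __all__ resolves against the package. A generic sketch of that idea, run here against the stdlib json package since the module list above is Python-2 era:
import importlib
def check_exports(package, names):
    mod = importlib.import_module(package)
    for name in names:
        # Each exported name must be an attribute or an importable submodule.
        if not hasattr(mod, name):
            importlib.import_module('%s.%s' % (package, name))
check_exports('json', ['loads', 'dumps', 'decoder'])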
b728253a668c7ff2fba12678d77344bfc645e40b
|
dusty/daemon.py
|
dusty/daemon.py
|
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
logging.info('Listening on socket at {}'.format(SOCKET_PATH))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
|
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket(socket_path):
try:
os.unlink(socket_path)
except OSError:
if os.path.exists(socket_path):
raise
def _listen_on_socket(socket_path):
_clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_path)
sock.listen(1)
logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
|
Make this easier to test, which we'll get to a bit later
|
Make this easier to test, which we'll get to a bit later
|
Python
|
mit
|
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
|
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
- def _clean_up_existing_socket():
+ def _clean_up_existing_socket(socket_path):
try:
- os.unlink(SOCKET_PATH)
+ os.unlink(socket_path)
except OSError:
- if os.path.exists(SOCKET_PATH):
+ if os.path.exists(socket_path):
raise
- def _listen_on_socket():
+ def _listen_on_socket(socket_path):
- _clean_up_existing_socket()
+ _clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.bind(SOCKET_PATH)
+ sock.bind(socket_path)
sock.listen(1)
- logging.info('Listening on socket at {}'.format(SOCKET_PATH))
+ logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
- _listen_on_socket()
+ _listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
|
Make this easier to test, which we'll get to a bit later
|
## Code Before:
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
logging.info('Listening on socket at {}'.format(SOCKET_PATH))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
## Instruction:
Make this easier to test, which we'll get to a bit later
## Code After:
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket(socket_path):
try:
os.unlink(socket_path)
except OSError:
if os.path.exists(socket_path):
raise
def _listen_on_socket(socket_path):
_clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_path)
sock.listen(1)
logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
|
import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
- def _clean_up_existing_socket():
+ def _clean_up_existing_socket(socket_path):
? +++++++++++
try:
- os.unlink(SOCKET_PATH)
+ os.unlink(socket_path)
except OSError:
- if os.path.exists(SOCKET_PATH):
+ if os.path.exists(socket_path):
raise
- def _listen_on_socket():
+ def _listen_on_socket(socket_path):
? +++++++++++
- _clean_up_existing_socket()
+ _clean_up_existing_socket(socket_path)
? +++++++++++
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.bind(SOCKET_PATH)
+ sock.bind(socket_path)
sock.listen(1)
- logging.info('Listening on socket at {}'.format(SOCKET_PATH))
? ^^^^^^ ^^^^
+ logging.info('Listening on socket at {}'.format(socket_path))
? ^^^^^^ ^^^^
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
- _listen_on_socket()
+ _listen_on_socket(SOCKET_PATH)
? +++++++++++
if __name__ == '__main__':
main()
|
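The payoff of threading socket_path through as a parameter is that a test can bind to a throwaway path instead of the global constant. A Unix-only sketch reusing the cleanup helper from the record:
import os
import socket
import tempfile
def _clean_up_existing_socket(socket_path):
    try:
        os.unlink(socket_path)
    except OSError:
        if os.path.exists(socket_path):
            raise
path = os.path.join(tempfile.mkdtemp(), 'dusty-test.sock')
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(path)    # leaves a socket file on disk
sock.close()
_clean_up_existing_socket(path)
assert not os.path.exists(path)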
4503e6671828497189736c86d408f6c0a8b47058
|
lambda_tweet.py
|
lambda_tweet.py
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
Update key name for S3
|
Update key name for S3
|
Python
|
mit
|
onema/lambda-tweet
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
+ print()
- s3_info = event['Records'][0]['S3']
+ s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
Update key name for S3
|
## Code Before:
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
## Instruction:
Update key name for S3
## Code After:
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
+ print()
- s3_info = event['Records'][0]['S3']
? ^
+ s3_info = event['Records'][0]['s3']
? ^
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
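S3 event notifications nest the payload under a lowercase 's3' key, which is what the uppercase lookup missed. A minimal fixture in the shape the handler now reads, with bucket and key invented:
event = {'Records': [{'s3': {
    'bucket': {'name': 'my-image-bucket'},
    'object': {'key': 'uploads/photo.jpg'},
}}]}
s3_info = event['Records'][0]['s3']
assert s3_info['bucket']['name'] == 'my-image-bucket'
assert s3_info['object']['key'] == 'uploads/photo.jpg'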
28e00395cd29dee1449ec522b55d08f68518eb70
|
pyoctree/__init__.py
|
pyoctree/__init__.py
|
import version
__version__ = version.__version__
|
from .version import __version__
__version__ = version.__version__
|
Fix import bug in Python 3
|
Fix import bug in Python 3
|
Python
|
mit
|
mhogg/pyoctree,mhogg/pyoctree
|
- import version
+ from .version import __version__
__version__ = version.__version__
|
Fix import bug in Python 3
|
## Code Before:
import version
__version__ = version.__version__
## Instruction:
Fix import bug in Python 3
## Code After:
from .version import __version__
__version__ = version.__version__
|
- import version
+ from .version import __version__
__version__ = version.__version__
|
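One subtlety: the leftover __version__ = version.__version__ line still runs, because importing pyoctree.version binds the submodule as an attribute of the package, and __init__.py's globals are that namespace; the from-import alone would suffice. The same binding side effect, demonstrated on a stdlib package:
import sys
import xml
from xml.etree import ElementTree   # side effect: binds 'etree' onto the xml package
assert xml.etree is sys.modules['xml.etree']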
cc51137aedeee8bdcf6b47e98b195ec750183ab4
|
context_variables/__init__.py
|
context_variables/__init__.py
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
Allow plain values, not just methods
|
Allow plain values, not just methods
|
Python
|
mit
|
carlmjohnson/django-context-variables
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
+ # If we got a plain value, return that
+ if not callable(self.func):
+ return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
Allow plain values, not just methods
|
## Code Before:
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
## Instruction:
Allow plain values, not just methods
## Code After:
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
+ # If we got a plain value, return that
+ if not callable(self.func):
+ return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
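A minimal usage sketch for the descriptor above (hypothetical class and values, not from the repository; assumes context_variable and get_context_variables are in scope):
class PageContext(object):
    greeting = context_variable("hello")  # plain value: returned as-is on access
    @context_variable
    def total(self):  # callable: evaluated once per instance, then cached on it
        return 40 + 2
ctx = get_context_variables(PageContext())
# ctx == {'greeting': 'hello', 'total': 42}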
60cbe21d95cc6e079979022a505dcc2099bd30c1
|
cla_public/libs/call_centre_availability.py
|
cla_public/libs/call_centre_availability.py
|
import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
Add welsh days ordinal suffix
|
Add welsh days ordinal suffix
|
Python
|
mit
|
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
|
import datetime
+ from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
- def suffix(d):
+ def suffix_welsh(day):
+ ordinals = {
+ "1": "af",
+ "2": "il",
+ "3": "ydd",
+ "4": "ydd",
+ "5": "ed",
+ "6": "ed",
+ "7": "fed",
+ "8": "fed",
+ "9": "fed",
+ "10": "fed",
+ "11": "eg",
+ "12": "fed",
+ "13": "eg",
+ "14": "eg",
+ "15": "fed",
+ "16": "eg",
+ "17": "eg",
+ "18": "fed",
+ "19": "eg",
+ "20": "fed",
+ }
+ return ordinals.get(str(day), "ain")
+
+
+ def suffix_english(day):
- if 11 <= d <= 13:
+ if 11 <= day <= 13:
return _("th")
- return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
+ return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
+
+
+ def suffix(day):
+ if get_locale()[:2] == "cy":
+ return suffix_welsh(day)
+
+ return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
Add welsh days ordinal suffix
|
## Code Before:
import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
## Instruction:
Add welsh days ordinal suffix
## Code After:
import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
import datetime
+ from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
- def suffix(d):
+ def suffix_welsh(day):
? ++++++ ++
+ ordinals = {
+ "1": "af",
+ "2": "il",
+ "3": "ydd",
+ "4": "ydd",
+ "5": "ed",
+ "6": "ed",
+ "7": "fed",
+ "8": "fed",
+ "9": "fed",
+ "10": "fed",
+ "11": "eg",
+ "12": "fed",
+ "13": "eg",
+ "14": "eg",
+ "15": "fed",
+ "16": "eg",
+ "17": "eg",
+ "18": "fed",
+ "19": "eg",
+ "20": "fed",
+ }
+ return ordinals.get(str(day), "ain")
+
+
+ def suffix_english(day):
- if 11 <= d <= 13:
+ if 11 <= day <= 13:
? ++
return _("th")
- return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
+ return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
? ++
+
+
+ def suffix(day):
+ if get_locale()[:2] == "cy":
+ return suffix_welsh(day)
+
+ return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
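Expected behaviour of the suffix helpers above, sketched with plain return values (in the module the English suffixes pass through lazy_gettext, and get_locale decides which helper runs):
suffix_welsh(1)    # -> 'af'
suffix_welsh(11)   # -> 'eg'
suffix_welsh(31)   # -> 'ain'  (fallback for days outside the 1-20 table)
suffix_english(2)  # -> 'nd'
suffix_english(12) # -> 'th'   (11-13 are always 'th')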
57e66ae6cd833b1b0da5b71e1c4b6e223c8ca062
|
test/test_data.py
|
test/test_data.py
|
"""Tests for coverage.data"""
import unittest
from coverage.data import CoverageData
class DataTest(unittest.TestCase):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
"""Tests for coverage.data"""
from coverage.data import CoverageData
from coveragetest import CoverageTest
class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
|
Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
|
Python
|
apache-2.0
|
7WebPages/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,hugovk/coveragepy
|
"""Tests for coverage.data"""
- import unittest
from coverage.data import CoverageData
+ from coveragetest import CoverageTest
- class DataTest(unittest.TestCase):
+ class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
|
## Code Before:
"""Tests for coverage.data"""
import unittest
from coverage.data import CoverageData
class DataTest(unittest.TestCase):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
## Instruction:
Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
## Code After:
"""Tests for coverage.data"""
from coverage.data import CoverageData
from coveragetest import CoverageTest
class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
"""Tests for coverage.data"""
- import unittest
from coverage.data import CoverageData
+ from coveragetest import CoverageTest
- class DataTest(unittest.TestCase):
+ class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
d38392998869319677cc884836c5952441f6ac62
|
pokemongo_bot/base_task.py
|
pokemongo_bot/base_task.py
|
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
Support for log_delay for all tasks
|
Support for log_delay for all tasks
|
Python
|
mit
|
lythien/pokemongo,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,halsafar/PokemonGo-Bot,goshan/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,lythien/pokemongo,pengzhangdev/PokemonGo-Bot,halsafar/PokemonGo-Bot,halsafar/PokemonGo-Bot,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,cmezh/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,heihachi/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,goshan/PokemonGo-Bot,cmezh/PokemonGo-Bot,halsafar/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot
|
import logging
+
+ import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
+ self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
+
+ # Print log only if X seconds are passed from last log
+ if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
+ self.last_log_time = time.time()
- self.bot.event_manager.emit(
+ self.bot.event_manager.emit(
- event,
+ event,
- sender=sender,
+ sender=sender,
- level=level,
+ level=level,
- formatted=formatted,
+ formatted=formatted,
- data=data
+ data=data
- )
+ )
def initialize(self):
pass
|
Support for log_delay for all tasks
|
## Code Before:
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
## Instruction:
Support for log_delay for all tasks
## Code After:
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
import logging
+
+ import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
+ self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
+
+ # Print log only if X seconds are passed from last log
+ if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
+ self.last_log_time = time.time()
- self.bot.event_manager.emit(
+ self.bot.event_manager.emit(
? ++
- event,
+ event,
? ++
- sender=sender,
+ sender=sender,
? ++
- level=level,
+ level=level,
? ++
- formatted=formatted,
+ formatted=formatted,
? ++
- data=data
+ data=data
? ++
- )
+ )
? ++
def initialize(self):
pass
|
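A hypothetical task configuration illustrating the new throttle (the keys are the ones BaseTask reads above; the values are made up):
config = {
    "enabled": True,
    "log_delay": 10,  # seconds: emit_event() forwards to the event manager
                      # at most once per 10 seconds for this task
}
# Omitting "log_delay" keeps the old behaviour, since
# config.get('log_delay', 0) makes the throttle window zero.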
77a6bb72318e9b02cbb1179cbbbacd3dd0bad55f
|
bookstore/__init__.py
|
bookstore/__init__.py
|
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
from . import swift
from . import cloudfiles
|
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
#from . import swift
#from . import cloudfiles
from . import filenotebookmanager
|
Add unit test for bookstore
|
Add unit test for bookstore
|
Python
|
apache-2.0
|
wusung/ipython-notebook-store
|
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
- from . import swift
+ #from . import swift
- from . import cloudfiles
+ #from . import cloudfiles
+ from . import filenotebookmanager
|
Add unit test for bookstore
|
## Code Before:
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
from . import swift
from . import cloudfiles
## Instruction:
Add unit test for bookstore
## Code After:
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
#from . import swift
#from . import cloudfiles
from . import filenotebookmanager
|
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
- from . import swift
+ #from . import swift
? +
- from . import cloudfiles
+ #from . import cloudfiles
? +
+ from . import filenotebookmanager
|
428ce0c6d1d90eea1fb6e5fea192b92f2cd4ea36
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
|
from distutils.core import setup
with open('README.md') as fin:
readme = fin.read()
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
long_description=readme,
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
|
Include readme as long description
|
Include readme as long description
|
Python
|
bsd-3-clause
|
funkybob/paws
|
from distutils.core import setup
+ with open('README.md') as fin:
+ readme = fin.read()
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
+ long_description=readme,
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
|
Include readme as long description
|
## Code Before:
from distutils.core import setup
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
## Instruction:
Include readme as long description
## Code After:
from distutils.core import setup
with open('README.md') as fin:
readme = fin.read()
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
long_description=readme,
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
|
from distutils.core import setup
+ with open('README.md') as fin:
+ readme = fin.read()
setup(
name='PAWS',
version='0.1.0',
description='Python AWS Tools for Serverless',
+ long_description=readme,
author='Curtis Maloney',
author_email='[email protected]',
url='https://github.com/funkybob/paws',
packages=['paws', 'paws.contrib', 'paws.views'],
)
|
ecacafa1c104c319c0abd92ec965c7d5a4c01786
|
trac/db/tests/__init__.py
|
trac/db/tests/__init__.py
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Remove backup from tests - it moved to functional
|
Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
jun66j5/trac-ja,netjunki/trac-Pygit2,walty8/trac,walty8/trac,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,jun66j5/trac-ja,netjunki/trac-Pygit2
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
- from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Remove backup from tests - it moved to functional
|
## Code Before:
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
## Instruction:
Remove backup from tests - it moved to functional
## Code After:
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
- from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
71d56354fb053c6cef3dc2c8960f78f588327114
|
project/views.py
|
project/views.py
|
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
Store password in session after successful login.
|
Store password in session after successful login.
|
Python
|
agpl-3.0
|
InScience/DAMIS-old,InScience/DAMIS-old
|
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
+ request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
Store password in session after successful login.
|
## Code Before:
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
## Instruction:
Store password in session after successful login.
## Code After:
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
+ request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
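A sketch of reading the stored credential back in a later view (view name and template are hypothetical; render is the same import used above):
def dashboard(request):
    # Set during login_view above; None when the user has not logged in.
    password = request.session.get('password')
    return render(request, 'dashboard.html',
                  {'has_password': password is not None})
Keeping a plaintext password in the session store is a notable design trade-off; it is only as safe as the session backend that holds it.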
e5e61e4d2575a39d585b3c51c082b2b53bade7bb
|
django_sphinx_db/backend/sphinx/base.py
|
django_sphinx_db/backend/sphinx/base.py
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
|
Speed up unit tests when Sphinx DB is configured
|
Speed up unit tests when Sphinx DB is configured
|
Python
|
bsd-3-clause
|
smartfile/django-sphinx-db,rutube/django-sphinx-db,anatoliy-larin/django-sphinx-db,jnormore/django-sphinx-db,petekalo/django-sphinx-db
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
+ # The following can be useful for unit testing, with multiple databases
+ # configured in Django, if one of them does not support transactions,
+ # Django will fall back to using clear/create (instead of begin...rollback)
+ # between each test. The method Django uses to detect transactions uses
+ # CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
+ # transactions ARE. Therefore, we can just set this to True, and Django will
+ # use transactions for clearing data between tests when all OTHER backends
+ # support it.
+ self.features.supports_transactions = True
|
Speed up unit tests when Sphinx DB is configured
|
## Code Before:
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
## Instruction:
Speed up unit tests when Sphinx DB is configured
## Code After:
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
+ # The following can be useful for unit testing, with multiple databases
+ # configured in Django, if one of them does not support transactions,
+ # Django will fall back to using clear/create (instead of begin...rollback)
+ # between each test. The method Django uses to detect transactions uses
+ # CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
+ # transactions ARE. Therefore, we can just set this to True, and Django will
+ # use transactions for clearing data between tests when all OTHER backends
+ # support it.
+ self.features.supports_transactions = True
|
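A hypothetical Django settings entry for this backend (the engine path, database name, and port are assumptions, not taken from the repository):
DATABASES = {
    'sphinx': {
        'ENGINE': 'django_sphinx_db.backend.sphinx',
        'NAME': 'sphinx',
        'TEST_NAME': 'sphinx',  # SphinxCreation reuses it; nothing is created or dropped
        'HOST': '127.0.0.1',
        'PORT': '9306',         # conventional SphinxQL listener port
    },
}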
ccc76f356a0480767eceff83d2b573aa922896f5
|
package/management/commands/repo_updater.py
|
package/management/commands/repo_updater.py
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
Fix the last_fetched and repo commands
|
Fix the last_fetched and repo commands
|
Python
|
mit
|
QLGu/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,pydanny/djangopackages,QLGu/djangopackages
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
- package.repo.fetch_metadata(package, fetch_pypi=False)
+ package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
+ package.last_fetched = timezone.now()
+ package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
Fix the last_fetched and repo commands
|
## Code Before:
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
## Instruction:
Fix the last_fetched and repo commands
## Code After:
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
- package.repo.fetch_metadata(package, fetch_pypi=False)
? ------------------
+ package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
+ package.last_fetched = timezone.now()
+ package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
d60ce9b23bcf2f8c60b2a8ce75eeba8779345b8b
|
Orange/tests/__init__.py
|
Orange/tests/__init__.py
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
load(test_widget),
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
#load(test_widget), # does not run on travis
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Disable widget test. (does not run on travis)
|
Disable widget test. (does not run on travis)
|
Python
|
bsd-2-clause
|
marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,kwikadi/orange3,cheral/orange3,qusp/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qusp/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,kwikadi/orange3,qusp/orange3,marinkaz/orange3,qusp/orange3
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
- load(test_widget),
+ #load(test_widget), # does not run on travis
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Disable widget test. (does not run on travis)
|
## Code Before:
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
load(test_widget),
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
## Instruction:
Disable widget test. (does not run on travis)
## Code After:
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
#load(test_widget), # does not run on travis
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
- load(test_widget),
+ #load(test_widget), # does not run on travis
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
3df9cdb0f96e68fb6870f3ee261cd206d38fb787
|
octane/tests/test_app.py
|
octane/tests/test_app.py
|
import io
from octane import app as o_app
def test_help():
out, err = io.BytesIO(), io.BytesIO()
app = o_app.OctaneApp(stdin=io.BytesIO(), stdout=out, stderr=err)
try:
app.run(["--help"])
except SystemExit as e:
assert e.code == 0
assert not err.getvalue()
assert 'Could not' not in out.getvalue()
|
import io
import pytest
from octane import app as o_app
@pytest.fixture
def octane_app():
return o_app.OctaneApp(stdin=io.BytesIO(), stdout=io.BytesIO(),
stderr=io.BytesIO())
def test_help(octane_app):
try:
octane_app.run(["--help"])
except SystemExit as e:
assert e.code == 0
assert not octane_app.stderr.getvalue()
assert 'Could not' not in octane_app.stdout.getvalue()
|
Refactor test to use cool py.test's fixture
|
Refactor test to use cool py.test's fixture
|
Python
|
apache-2.0
|
Mirantis/octane,stackforge/fuel-octane,Mirantis/octane,stackforge/fuel-octane
|
import io
+
+ import pytest
from octane import app as o_app
+ @pytest.fixture
+ def octane_app():
+ return o_app.OctaneApp(stdin=io.BytesIO(), stdout=io.BytesIO(),
+ stderr=io.BytesIO())
+
+
- def test_help():
+ def test_help(octane_app):
- out, err = io.BytesIO(), io.BytesIO()
- app = o_app.OctaneApp(stdin=io.BytesIO(), stdout=out, stderr=err)
try:
- app.run(["--help"])
+ octane_app.run(["--help"])
except SystemExit as e:
assert e.code == 0
- assert not err.getvalue()
+ assert not octane_app.stderr.getvalue()
- assert 'Could not' not in out.getvalue()
+ assert 'Could not' not in octane_app.stdout.getvalue()
|
Refactor test to use cool py.test's fixture
|
## Code Before:
import io
from octane import app as o_app
def test_help():
out, err = io.BytesIO(), io.BytesIO()
app = o_app.OctaneApp(stdin=io.BytesIO(), stdout=out, stderr=err)
try:
app.run(["--help"])
except SystemExit as e:
assert e.code == 0
assert not err.getvalue()
assert 'Could not' not in out.getvalue()
## Instruction:
Refactor test to use cool py.test's fixture
## Code After:
import io
import pytest
from octane import app as o_app
@pytest.fixture
def octane_app():
return o_app.OctaneApp(stdin=io.BytesIO(), stdout=io.BytesIO(),
stderr=io.BytesIO())
def test_help(octane_app):
try:
octane_app.run(["--help"])
except SystemExit as e:
assert e.code == 0
assert not octane_app.stderr.getvalue()
assert 'Could not' not in octane_app.stdout.getvalue()
|
import io
+
+ import pytest
from octane import app as o_app
+ @pytest.fixture
+ def octane_app():
+ return o_app.OctaneApp(stdin=io.BytesIO(), stdout=io.BytesIO(),
+ stderr=io.BytesIO())
+
+
- def test_help():
+ def test_help(octane_app):
? ++++++++++
- out, err = io.BytesIO(), io.BytesIO()
- app = o_app.OctaneApp(stdin=io.BytesIO(), stdout=out, stderr=err)
try:
- app.run(["--help"])
+ octane_app.run(["--help"])
? +++++++
except SystemExit as e:
assert e.code == 0
- assert not err.getvalue()
+ assert not octane_app.stderr.getvalue()
? ++++++++++++++
- assert 'Could not' not in out.getvalue()
+ assert 'Could not' not in octane_app.stdout.getvalue()
? ++++++++++++++
|
c7150bf227edf78d716fe4e09b3a073d9b0cfc1e
|
fmriprep/workflows/bold/tests/test_utils.py
|
fmriprep/workflows/bold/tests/test_utils.py
|
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
from ..utils import init_enhance_and_skullstrip_bold_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
def test_masking(input_fname, expected_fname):
enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf()
enhance_and_skullstrip_bold_wf.inputs.inputnode.in_file = input_fname
res = enhance_and_skullstrip_bold_wf.run()
combine_masks = [node for node in res.nodes if node.name == 'combine_masks'][0]
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
|
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
from ..utils import init_bold_reference_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
@pytest.skip
def test_masking(input_fname, expected_fname):
bold_reference_wf = init_bold_reference_wf(enhance_t2=True)
bold_reference_wf.inputs.inputnode.bold_file = input_fname
res = bold_reference_wf.run()
combine_masks = [node for node in res.nodes if node.name.endswith('combine_masks')][0]
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
|
Use bold_reference_wf to generate reference before enhancing
|
TEST: Use bold_reference_wf to generate reference before enhancing
|
Python
|
bsd-3-clause
|
poldracklab/preprocessing-workflow,poldracklab/fmriprep,poldracklab/preprocessing-workflow,oesteban/fmriprep,oesteban/fmriprep,oesteban/fmriprep,oesteban/preprocessing-workflow,poldracklab/fmriprep,poldracklab/fmriprep,oesteban/preprocessing-workflow
|
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
- from ..utils import init_enhance_and_skullstrip_bold_wf
+ from ..utils import init_bold_reference_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
+ @pytest.skip
def test_masking(input_fname, expected_fname):
- enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf()
- enhance_and_skullstrip_bold_wf.inputs.inputnode.in_file = input_fname
- res = enhance_and_skullstrip_bold_wf.run()
+ bold_reference_wf = init_bold_reference_wf(enhance_t2=True)
+ bold_reference_wf.inputs.inputnode.bold_file = input_fname
+ res = bold_reference_wf.run()
- combine_masks = [node for node in res.nodes if node.name == 'combine_masks'][0]
+ combine_masks = [node for node in res.nodes if node.name.endswith('combine_masks')][0]
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
|
Use bold_reference_wf to generate reference before enhancing
|
## Code Before:
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
from ..utils import init_enhance_and_skullstrip_bold_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
def test_masking(input_fname, expected_fname):
enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf()
enhance_and_skullstrip_bold_wf.inputs.inputnode.in_file = input_fname
res = enhance_and_skullstrip_bold_wf.run()
combine_masks = [node for node in res.nodes if node.name == 'combine_masks'][0]
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
## Instruction:
Use bold_reference_wf to generate reference before enhancing
## Code After:
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
from ..utils import init_bold_reference_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
@pytest.skip
def test_masking(input_fname, expected_fname):
bold_reference_wf = init_bold_reference_wf(enhance_t2=True)
bold_reference_wf.inputs.inputnode.bold_file = input_fname
res = bold_reference_wf.run()
combine_masks = [node for node in res.nodes if node.name.endswith('combine_masks')][0]
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
|
''' Testing module for fmriprep.workflows.base '''
import pytest
import numpy as np
from nilearn.image import load_img
- from ..utils import init_enhance_and_skullstrip_bold_wf
+ from ..utils import init_bold_reference_wf
def symmetric_overlap(img1, img2):
mask1 = load_img(img1).get_data() > 0
mask2 = load_img(img2).get_data() > 0
total1 = np.sum(mask1)
total2 = np.sum(mask2)
overlap = np.sum(mask1 & mask2)
return overlap / np.sqrt(total1 * total2)
+ @pytest.skip
def test_masking(input_fname, expected_fname):
- enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf()
- enhance_and_skullstrip_bold_wf.inputs.inputnode.in_file = input_fname
- res = enhance_and_skullstrip_bold_wf.run()
+ bold_reference_wf = init_bold_reference_wf(enhance_t2=True)
+ bold_reference_wf.inputs.inputnode.bold_file = input_fname
+ res = bold_reference_wf.run()
- combine_masks = [node for node in res.nodes if node.name == 'combine_masks'][0]
? ^^^^
+ combine_masks = [node for node in res.nodes if node.name.endswith('combine_masks')][0]
? ^^^^^^^^^^ +
overlap = symmetric_overlap(expected_fname,
combine_masks.result.outputs.out_file)
assert overlap < 0.95, input_fname
|
23e57facea49ebc093d1da7a9ae6857cd2c8dad7
|
warehouse/defaults.py
|
warehouse/defaults.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S# when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
Add an explicit default for REDIS_URI
|
Add an explicit default for REDIS_URI
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
+
+ # The URI for our Redis database.
+ REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
Add an explicit default for REDIS_URI
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
## Instruction:
Add an explicit default for REDIS_URI
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"
# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"
+
+ # The URI for our Redis database.
+ REDIS_URI = "redis://localhost:6379/0"
# The type of Storage to use. Can be either Filesystem or S3.
STORAGE = "Filesystem"
# The hash to use in computing filenames.
# Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None
STORAGE_HASH = "md5"
# Base directory for storage when using the Filesystem.
STORAGE_DIRECTORY = "data"
# The name of the bucket that files will be stored in when using S3.
# STORAGE_BUCKET = "<storage bucket>"
# The S3 Key used to access S3 when using S3 Storage
# S3_KEY = "<S3 Key>"
# The S3 Secret used to access S3 when using S3 Storage
# S3_SECRET = "<S3 Secret>"
|
adb6ce275e1cbc2d000286e169a4a96b25b32dbb
|
test_doc.py
|
test_doc.py
|
import doctest
import sys
if hasattr(doctest, "testfile"):
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
if failure:
sys.exit(1)
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
if failure:
sys.exit(1)
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
if failure:
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
|
import doctest
import sys
if hasattr(doctest, "testfile"):
total_failures, total_tests = (0, 0)
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
total_failures += failure
total_tests += tests
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
total_failures += failure
total_tests += tests
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
total_failures += failure
total_tests += tests
print("=== Overall Results ===")
print("total tests %d, failures %d" % (total_tests, total_failures))
if total_failures:
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
|
Allow doctest runner to keep going after failures
|
Allow doctest runner to keep going after failures
It will still return an error code, but there is little need to halt the
running of the three different doctest modules if an early one fails,
which may in fact mask the real reason for failure in an IPy internal
method.
Signed-off-by: Dan McGee <[email protected]>
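The pattern the message describes, accumulating failure counts across several doctest runs and exiting non-zero only at the end, in a standalone sketch (the target lists are placeholders):
```python
import doctest
import sys

def run_doctests(files, modules):
    """Run every target, then report once; one failure no longer aborts the rest."""
    failures = tests = 0
    for path in files:
        f, t = doctest.testfile(path, optionflags=doctest.ELLIPSIS)
        failures += f
        tests += t
    for mod in modules:
        f, t = doctest.testmod(mod)
        failures += f
        tests += t
    print("total tests %d, failures %d" % (tests, failures))
    return failures

if __name__ == "__main__":
    sys.exit(1 if run_doctests(["README"], []) else 0)
```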
|
Python
|
bsd-3-clause
|
dstam/python-ipy,sigma-random/python-ipy
|
import doctest
import sys
if hasattr(doctest, "testfile"):
+ total_failures, total_tests = (0, 0)
+
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
- if failure:
- sys.exit(1)
+ total_failures += failure
+ total_tests += tests
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
- if failure:
- sys.exit(1)
+ total_failures += failure
+ total_tests += tests
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
+ total_failures += failure
+ total_tests += tests
+
+ print("=== Overall Results ===")
+ print("total tests %d, failures %d" % (total_tests, total_failures))
- if failure:
+ if total_failures:
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
|
Allow doctest runner to keep going after failures
|
## Code Before:
import doctest
import sys
if hasattr(doctest, "testfile"):
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
if failure:
sys.exit(1)
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
if failure:
sys.exit(1)
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
if failure:
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
## Instruction:
Allow doctest runner to keep going after failures
## Code After:
import doctest
import sys
if hasattr(doctest, "testfile"):
total_failures, total_tests = (0, 0)
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
total_failures += failure
total_tests += tests
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
total_failures += failure
total_tests += tests
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
total_failures += failure
total_tests += tests
print("=== Overall Results ===")
print("total tests %d, failures %d" % (total_tests, total_failures))
if total_failures:
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
|
import doctest
import sys
if hasattr(doctest, "testfile"):
+ total_failures, total_tests = (0, 0)
+
print("=== Test file: README ===")
failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
- if failure:
- sys.exit(1)
+ total_failures += failure
+ total_tests += tests
print("=== Test file: test.rst ===")
failure, tests = doctest.testfile('test/test.rst', optionflags=doctest.ELLIPSIS)
- if failure:
- sys.exit(1)
+ total_failures += failure
+ total_tests += tests
print("=== Test IPy module ===")
import IPy
failure, tests = doctest.testmod(IPy)
+ total_failures += failure
+ total_tests += tests
+
+ print("=== Overall Results ===")
+ print("total tests %d, failures %d" % (total_tests, total_failures))
- if failure:
+ if total_failures:
? ++++++ +
sys.exit(1)
else:
sys.stderr.write("WARNING: doctest has no function testfile (before Python 2.4), unable to check README\n")
|
4217f587606c4e326b4df97681ae4f5187b6e6d9
|
falmer/content/serializers.py
|
falmer/content/serializers.py
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'resource')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
Remove wagtail_image from image resources
|
Remove wagtail_image from image resources
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
- wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
- fields = ('id', 'wagtail_image', 'resource')
+ fields = ('id', 'resource')
-
- def get_wagtail_image(self, image):
- return generate_image_url(image, 'fill-400x400')
-
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
Remove wagtail_image from image resources
|
## Code Before:
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
## Instruction:
Remove wagtail_image from image resources
## Code After:
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'resource')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
- wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
- fields = ('id', 'wagtail_image', 'resource')
? -----------------
+ fields = ('id', 'resource')
-
- def get_wagtail_image(self, image):
- return generate_image_url(image, 'fill-400x400')
-
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
def129e32bf731351253e210b53c44cf8c57c302
|
planetstack/openstack_observer/steps/sync_images.py
|
planetstack/openstack_observer/steps/sync_images.py
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
Check the existence of the images_path
|
Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <[email protected]>
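The fix in isolation: guard `os.listdir()` with an existence check so a missing directory yields an empty result instead of the `OSError` above. An alternative, sketched in the trailing comment, is to create the directory up front; the path is the one from the traceback:
```python
import os

images_path = "/opt/xos/images"   # path from the traceback above

available_images = {}
if os.path.exists(images_path):   # the directory may legitimately be absent
    for f in os.listdir(images_path):
        filename = os.path.join(images_path, f)
        if os.path.isfile(filename):
            available_images[f] = filename

# Alternative: create the directory instead of skipping it.
# os.makedirs(images_path, exist_ok=True)   # exist_ok requires Python >= 3.2
```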
|
Python
|
apache-2.0
|
wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
- images_path = Config().observer_images_directory
+ images_path = Config().observer_images_directory
+
available_images = {}
+ if os.path.exists(images_path):
- for f in os.listdir(images_path):
+ for f in os.listdir(images_path):
- if os.path.isfile(os.path.join(images_path ,f)):
- available_images[f] = os.path.join(images_path ,f)
+ filename = os.path.join(images_path, f)
+ if os.path.isfile(filename):
+ available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
+
-
-
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
Check the existence of the images_path
|
## Code Before:
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
## Instruction:
Check the existence of the images_path
## Code After:
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
- images_path = Config().observer_images_directory
? -
+ images_path = Config().observer_images_directory
+
available_images = {}
+ if os.path.exists(images_path):
- for f in os.listdir(images_path):
+ for f in os.listdir(images_path):
? ++++
- if os.path.isfile(os.path.join(images_path ,f)):
- available_images[f] = os.path.join(images_path ,f)
? ^^^ ^^ ---------- -
+ filename = os.path.join(images_path, f)
? ^ ++ ^ +
+ if os.path.isfile(filename):
+ available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
+
-
-
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
493aef6b9965bd4fd83fac8a4cdd790b2d8010e2
|
chainercv/links/connection/seblock.py
|
chainercv/links/connection/seblock.py
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
Simplify SEBlock by broadcast of binary op
|
Simplify SEBlock by broadcast of binary op
instead of explicit broadcast_to. The main motivation of this
change is to simplify the exported ONNX, but this would also
improve performance.
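The idea behind the change, shown with NumPy's identical broadcasting rules: reshaping the channel weights to `(B, C, 1, 1)` lets the elementwise multiply expand them over the spatial axes, so the explicit `broadcast_to` and `transpose` become unnecessary. Shapes below are arbitrary:
```python
import numpy as np

B, C, H, W = 2, 4, 8, 8
u = np.random.rand(B, C, H, W)    # feature map
w = np.random.rand(B, C)          # per-channel weights from the SE block

w = w.reshape(w.shape[:2] + (1, 1))   # (B, C) -> (B, C, 1, 1)
out = u * w                           # broadcasts over H and W

# Equivalent to the old explicit route:
ref = u * np.broadcast_to(w, (B, C, H, W))
assert np.allclose(out, ref)
```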
|
Python
|
mit
|
chainer/chainercv,pfnet/chainercv,chainer/chainercv
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
+ x = F.reshape(x, x.shape[:2] + (1, 1))
+ # Spatial axes of `x` will be broadcasted.
-
- x = F.broadcast_to(x, (H, W, B, C))
- x = x.transpose((2, 3, 0, 1))
-
return u * x
|
Simplify SEBlock by broadcast of binary op
|
## Code Before:
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.broadcast_to(x, (H, W, B, C))
x = x.transpose((2, 3, 0, 1))
return u * x
## Instruction:
Simplify SEBlock by broadcast of binary op
## Code After:
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
+ x = F.reshape(x, x.shape[:2] + (1, 1))
+ # Spatial axes of `x` will be broadcasted.
-
- x = F.broadcast_to(x, (H, W, B, C))
- x = x.transpose((2, 3, 0, 1))
-
return u * x
|
03dcdca0f51ca40a2e3fee6da3182197d69de21d
|
pytrmm/__init__.py
|
pytrmm/__init__.py
|
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
import trmm3b4xrt
|
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
from trmm3b4xrt import *
|
Put file reader class into top-level namespace
|
ENH: Put file reader class into top-level namespace
|
Python
|
bsd-3-clause
|
sahg/pytrmm
|
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
- import trmm3b4xrt
+ from trmm3b4xrt import *
|
Put file reader class into top-level namespace
|
## Code Before:
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
import trmm3b4xrt
## Instruction:
Put file reader class into top-level namespace
## Code After:
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
from trmm3b4xrt import *
|
try:
from __dev_version import version as __version__
from __dev_version import git_revision as __git_revision__
except ImportError:
from __version import version as __version__
from __version import git_revision as __git_revision__
- import trmm3b4xrt
+ from trmm3b4xrt import *
|
b6d08abf7bc4aafaeec59944bdcdf8ae4a9352d5
|
recipe_scrapers/consts.py
|
recipe_scrapers/consts.py
|
import re
TIME_REGEX = re.compile(
r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
|
import re
TIME_REGEX = re.compile(
r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
|
Update time_regex capture so it works with more sites
|
Update time_regex capture so it works with more sites
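What the loosened pattern accepts, sketched with a few illustrative strings; note the alternation matches `hrs`, `hr`, and `h` but not the singular `hour`:
```python
import re

TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)

for text in ("3 hrs 20 mins", "1h 30m", "45 min"):
    m = TIME_REGEX.match(text)
    print(text, "->", m.group('hours'), m.group('minutes'))
# 3 hrs 20 mins -> 3 20
# 1h 30m -> 1 30
# 45 min -> None 45
```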
|
Python
|
mit
|
hhursev/recipe-scraper
|
import re
TIME_REGEX = re.compile(
- r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
+ r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
|
Update time_regex capture so it works with more sites
|
## Code Before:
import re
TIME_REGEX = re.compile(
r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
## Instruction:
Update time_regex capture so it works with more sites
## Code After:
import re
TIME_REGEX = re.compile(
r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
|
import re
TIME_REGEX = re.compile(
- r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
? ^^^ --- ^^^
+ r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
? ^ +++ ^
)
HTML_SYMBOLS = '\xa0' #
|
a3187d16a70966c84a4f4977768fcfefc93b5a6d
|
this_app/forms.py
|
this_app/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
class BucketlistItemForm(FlaskForm):
"""Form to CRUD a bucketlist item"""
title = StringField("Title", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
status = BooleanField("Status", validators=[DataRequired])
|
Add form to create a bucketlist item
|
Add form to create a bucketlist item
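One caveat worth flagging in the added form: WTForms validators are passed as instances, so the bare class in `validators=[DataRequired]` on the `status` field would fail when the validator is invoked. Also, `DataRequired()` on a `BooleanField` rejects an unchecked box, so `Optional()` may be closer to the intent for a status flag; that substitution is an assumption, not part of the commit. A sketch:
```python
from flask_wtf import FlaskForm
from wtforms import BooleanField, StringField, TextAreaField
from wtforms.validators import DataRequired, Optional

class BucketlistItemForm(FlaskForm):
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    # DataRequired() on a BooleanField rejects an unchecked box, so the
    # hypothetical Optional() here treats "unchecked" as a valid value.
    status = BooleanField("Status", validators=[Optional()])
```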
|
Python
|
mit
|
borenho/flask-bucketlist,borenho/flask-bucketlist
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
+
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
+
+ class BucketlistItemForm(FlaskForm):
+ """Form to CRUD a bucketlist item"""
+ title = StringField("Title", validators=[DataRequired()])
+ description = TextAreaField("Description", validators=[DataRequired()])
+ status = BooleanField("Status", validators=[DataRequired])
+
|
Add form to create a bucketlist item
|
## Code Before:
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
## Instruction:
Add form to create a bucketlist item
## Code After:
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
class BucketlistItemForm(FlaskForm):
"""Form to CRUD a bucketlist item"""
title = StringField("Title", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
status = BooleanField("Status", validators=[DataRequired])
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
+
class BucketlistForm(FlaskForm):
"""Form to CRUD a bucketlist"""
name = StringField("Name", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
+
+
+ class BucketlistItemForm(FlaskForm):
+ """Form to CRUD a bucketlist item"""
+ title = StringField("Title", validators=[DataRequired()])
+ description = TextAreaField("Description", validators=[DataRequired()])
+ status = BooleanField("Status", validators=[DataRequired])
|
761aff647d3e20fc25f1911efa5d2235fe4b21d8
|
modoboa/extensions/admin/forms/forward.py
|
modoboa/extensions/admin/forms/forward.py
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
Add "form-control" attribute to some textareas
|
Add "form-control" attribute to some textareas
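The mechanism used here generalizes: any Django form widget accepts extra HTML attributes through its `attrs` argument. A small sketch with illustrative fields that are not from the commit:
```python
from django import forms

class ExampleForm(forms.Form):
    # Any widget accepts extra HTML attributes via `attrs`.
    comment = forms.CharField(
        widget=forms.Textarea(attrs={"class": "form-control", "rows": 4}))
    email = forms.EmailField(
        widget=forms.EmailInput(attrs={"class": "form-control",
                                       "placeholder": "[email protected]"}))
```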
|
Python
|
isc
|
modoboa/modoboa,RavenB/modoboa,mehulsbhatt/modoboa,bearstech/modoboa,modoboa/modoboa,RavenB/modoboa,tonioo/modoboa,bearstech/modoboa,mehulsbhatt/modoboa,bearstech/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa,carragom/modoboa,modoboa/modoboa,bearstech/modoboa,RavenB/modoboa,carragom/modoboa,tonioo/modoboa,mehulsbhatt/modoboa
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
- widget=forms.Textarea,
+ widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
Add "form-control" attribute to some textareas
|
## Code Before:
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
## Instruction:
Add "form-control" attribute to some textareas
## Code After:
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
- widget=forms.Textarea,
+ widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
0cc12b24ec4aac88380a36bb519bfc78ad81b277
|
run_job.py
|
run_job.py
|
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
run_info = data['run_info']
Bootstrap.run(session, data['build_id'], data['job_server'],
run_info['step_fun'], args = run_info['args'],
kwargs = run_info['kwargs'], env = run_info['env'])
|
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
run_info = data['run_info'] or {}
Bootstrap.run(session, data['build_id'], data['job_server'],
run_info.get('step_fun'),
args = run_info.get('args', []),
kwargs = run_info.get('kwargs', {}),
env = run_info.get('env'))
|
Support the case when run_info is not specified
|
Support the case when run_info is not specified
That is the case when starting a build (not running a step)
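The defensive pattern applied here, in isolation: coerce a possibly-`None` mapping to an empty dict with `or {}`, then read every key through `.get()` with a default. A minimal sketch whose key names mirror the ones above:
```python
def read_run_info(data):
    run_info = data.get('run_info') or {}   # missing or None becomes {}
    step_fun = run_info.get('step_fun')     # None when absent
    args = run_info.get('args', [])
    kwargs = run_info.get('kwargs', {})
    env = run_info.get('env')
    return step_fun, args, kwargs, env

# Starting a build: no run_info at all.
print(read_run_info({'run_info': None}))   # (None, [], {}, None)
```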
|
Python
|
apache-2.0
|
boivie/sci,boivie/sci
|
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
- run_info = data['run_info']
+ run_info = data['run_info'] or {}
+
Bootstrap.run(session, data['build_id'], data['job_server'],
- run_info['step_fun'], args = run_info['args'],
- kwargs = run_info['kwargs'], env = run_info['env'])
+ run_info.get('step_fun'),
+ args = run_info.get('args', []),
+ kwargs = run_info.get('kwargs', {}),
+ env = run_info.get('env'))
|
Support the case when run_info is not specified
|
## Code Before:
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
run_info = data['run_info']
Bootstrap.run(session, data['build_id'], data['job_server'],
run_info['step_fun'], args = run_info['args'],
kwargs = run_info['kwargs'], env = run_info['env'])
## Instruction:
Support the case when run_info is not specified
## Code After:
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
run_info = data['run_info'] or {}
Bootstrap.run(session, data['build_id'], data['job_server'],
run_info.get('step_fun'),
args = run_info.get('args', []),
kwargs = run_info.get('kwargs', {}),
env = run_info.get('env'))
|
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap
data = json.loads(sys.stdin.read())
session_id = sys.argv[1]
session = Session.load(session_id)
- run_info = data['run_info']
+ run_info = data['run_info'] or {}
? ++++++
+
Bootstrap.run(session, data['build_id'], data['job_server'],
- run_info['step_fun'], args = run_info['args'],
- kwargs = run_info['kwargs'], env = run_info['env'])
+ run_info.get('step_fun'),
+ args = run_info.get('args', []),
+ kwargs = run_info.get('kwargs', {}),
+ env = run_info.get('env'))
|
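Annotation: the rewrite normalizes a null run_info to an empty dict and then reads every key through .get with a default. The same defensive pattern in isolation (payloads are made up; unlike the script above, this sketch also tolerates the key being absent entirely):

import json

def parse_run_info(raw):
    data = json.loads(raw)
    # `or {}` covers an explicit null, so every .get below has a dict
    # to fall back on; data.get additionally survives a missing key.
    run_info = data.get('run_info') or {}
    return (run_info.get('step_fun'),
            run_info.get('args', []),
            run_info.get('kwargs', {}),
            run_info.get('env'))

print(parse_run_info('{"run_info": null}'))            # (None, [], {}, None)
print(parse_run_info('{"run_info": {"args": [1]}}'))   # (None, [1], {}, None)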
95686933911fb2720b0fad816056c9008ef1097d
|
kafka_influxdb/reader/kafka_reader.py
|
kafka_influxdb/reader/kafka_reader.py
|
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(kafka_client,
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
|
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
self.kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(self.kafka_client,
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
|
Make kafka client a field in Reader
|
Make kafka client a field in Reader
|
Python
|
apache-2.0
|
mre/kafka-influxdb,mre/kafka-influxdb
|
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
- kafka_client = KafkaClient(connection)
+ self.kafka_client = KafkaClient(connection)
- self.consumer = SimpleConsumer(kafka_client,
+ self.consumer = SimpleConsumer(self.kafka_client,
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
|
Make kafka client a field in Reader
|
## Code Before:
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(kafka_client,
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
## Instruction:
Make kafka client a field in Reader
## Code After:
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
self.kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(self.kafka_client,
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
|
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, connection_wait_time=2):
""" Initialize Kafka reader """
self.host = host
self.port = port
self.group = group
self.topic = topic
self.connection_wait_time = connection_wait_time
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at {}...", connection)
- kafka_client = KafkaClient(connection)
+ self.kafka_client = KafkaClient(connection)
? +++++
- self.consumer = SimpleConsumer(kafka_client,
+ self.consumer = SimpleConsumer(self.kafka_client,
? +++++
self.group,
self.topic)
def read(self):
""" Yield messages from Kafka topic """
while True:
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception:
logging.error("Connection to Kafka lost. Trying to reconnect to {}:{}...",
self.host, self.port)
time.sleep(self.connection_wait_time)
|
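Annotation: promoting the client from a local variable to an instance field is what makes it reachable from other methods, e.g. for teardown. A hypothetical extension building on the KafkaReader class shown above (the close() method is not part of the original; it assumes the client exposes close(), as kafka-python's old KafkaClient did):

class KafkaReaderWithCleanup(KafkaReader):
    def close(self):
        # Only possible because connect() stored the client on self;
        # with a local variable the connection could never be closed here.
        client = getattr(self, 'kafka_client', None)
        if client is not None:
            client.close()
            self.kafka_client = None

As an aside, the logging calls in this record pass {}-style placeholders with extra arguments, but the stdlib logging module interpolates %-style by default, so these calls would trigger a formatting error instead of the intended message.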
76784dc06bc1d7fedb7e2e85f87fc4a2c2a489fc
|
chainer/functions/reshape.py
|
chainer/functions/reshape.py
|
from chainer import function
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
import numpy
from chainer import function
from chainer.utils import type_check
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_type):
type_check.expect(in_type.size() == 1)
x_type, = in_type
in_shape_size = type_check.Variable(
numpy.prod(x_type.shape.eval()), 'in_shape_size')
out_shape_size = type_check.Variable(
numpy.prod(self.shape), 'out_shape_size')
type_check.expect(in_shape_size == out_shape_size)
def check_type_backward(self, in_types, out_types):
type_check.expect(out_types.size() == 1)
x_type, = in_types
y_type, = out_types
in_shape_size = type_check.Variable(
numpy.prod(x_type.shape.eval()), 'in_shape_size')
out_shape_size = type_check.Variable(
numpy.prod(y_type.shape.eval()), 'out_shape_size')
type_check.expect(in_shape_size == out_shape_size)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
Add typecheck for Reshape function
|
Add typecheck for Reshape function
|
Python
|
mit
|
muupan/chainer,niboshi/chainer,AlpacaDB/chainer,ktnyt/chainer,keisuke-umezawa/chainer,ytoyama/yans_chainer_hackathon,elviswf/chainer,wkentaro/chainer,jfsantos/chainer,chainer/chainer,ikasumi/chainer,hvy/chainer,okuta/chainer,bayerj/chainer,tigerneil/chainer,okuta/chainer,cupy/cupy,muupan/chainer,sinhrks/chainer,anaruse/chainer,niboshi/chainer,truongdq/chainer,ronekko/chainer,delta2323/chainer,cemoody/chainer,cupy/cupy,1986ks/chainer,hvy/chainer,keisuke-umezawa/chainer,woodshop/chainer,tkerola/chainer,tscohen/chainer,jnishi/chainer,jnishi/chainer,wavelets/chainer,okuta/chainer,sinhrks/chainer,ktnyt/chainer,kashif/chainer,jnishi/chainer,minhpqn/chainer,chainer/chainer,wkentaro/chainer,keisuke-umezawa/chainer,truongdq/chainer,cupy/cupy,cupy/cupy,kikusu/chainer,wkentaro/chainer,laysakura/chainer,ktnyt/chainer,kuwa32/chainer,masia02/chainer,hidenori-t/chainer,pfnet/chainer,jnishi/chainer,wkentaro/chainer,niboshi/chainer,hvy/chainer,ktnyt/chainer,yanweifu/chainer,niboshi/chainer,okuta/chainer,benob/chainer,umitanuki/chainer,kikusu/chainer,keisuke-umezawa/chainer,woodshop/complex-chainer,aonotas/chainer,ysekky/chainer,sou81821/chainer,t-abe/chainer,chainer/chainer,hvy/chainer,rezoo/chainer,kiyukuta/chainer,chainer/chainer,t-abe/chainer,AlpacaDB/chainer,Kaisuke5/chainer,benob/chainer
|
+ import numpy
+
from chainer import function
+ from chainer.utils import type_check
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
+
+ def check_type_forward(self, in_type):
+ type_check.expect(in_type.size() == 1)
+ x_type, = in_type
+
+ in_shape_size = type_check.Variable(
+ numpy.prod(x_type.shape.eval()), 'in_shape_size')
+ out_shape_size = type_check.Variable(
+ numpy.prod(self.shape), 'out_shape_size')
+ type_check.expect(in_shape_size == out_shape_size)
+
+ def check_type_backward(self, in_types, out_types):
+ type_check.expect(out_types.size() == 1)
+ x_type, = in_types
+ y_type, = out_types
+
+ in_shape_size = type_check.Variable(
+ numpy.prod(x_type.shape.eval()), 'in_shape_size')
+ out_shape_size = type_check.Variable(
+ numpy.prod(y_type.shape.eval()), 'out_shape_size')
+ type_check.expect(in_shape_size == out_shape_size)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
Add typecheck for Reshape function
|
## Code Before:
from chainer import function
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
## Instruction:
Add typecheck for Reshape function
## Code After:
import numpy
from chainer import function
from chainer.utils import type_check
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_type):
type_check.expect(in_type.size() == 1)
x_type, = in_type
in_shape_size = type_check.Variable(
numpy.prod(x_type.shape.eval()), 'in_shape_size')
out_shape_size = type_check.Variable(
numpy.prod(self.shape), 'out_shape_size')
type_check.expect(in_shape_size == out_shape_size)
def check_type_backward(self, in_types, out_types):
type_check.expect(out_types.size() == 1)
x_type, = in_types
y_type, = out_types
in_shape_size = type_check.Variable(
numpy.prod(x_type.shape.eval()), 'in_shape_size')
out_shape_size = type_check.Variable(
numpy.prod(y_type.shape.eval()), 'out_shape_size')
type_check.expect(in_shape_size == out_shape_size)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
+ import numpy
+
from chainer import function
+ from chainer.utils import type_check
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
+
+ def check_type_forward(self, in_type):
+ type_check.expect(in_type.size() == 1)
+ x_type, = in_type
+
+ in_shape_size = type_check.Variable(
+ numpy.prod(x_type.shape.eval()), 'in_shape_size')
+ out_shape_size = type_check.Variable(
+ numpy.prod(self.shape), 'out_shape_size')
+ type_check.expect(in_shape_size == out_shape_size)
+
+ def check_type_backward(self, in_types, out_types):
+ type_check.expect(out_types.size() == 1)
+ x_type, = in_types
+ y_type, = out_types
+
+ in_shape_size = type_check.Variable(
+ numpy.prod(x_type.shape.eval()), 'in_shape_size')
+ out_shape_size = type_check.Variable(
+ numpy.prod(y_type.shape.eval()), 'out_shape_size')
+ type_check.expect(in_shape_size == out_shape_size)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
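Annotation: both added checks reduce to the same invariant, namely that a copy-free reshape must preserve the total element count. The core condition, stripped of the type_check machinery (plain numpy, illustrative shapes):

import numpy

def can_reshape(in_shape, out_shape):
    # numpy.prod over a shape tuple gives the element count, which is
    # exactly what the Variable comparisons above assert to be equal.
    return numpy.prod(in_shape) == numpy.prod(out_shape)

print(can_reshape((2, 3, 4), (6, 4)))   # True:  24 == 24
print(can_reshape((2, 3, 4), (5, 5)))   # False: 24 != 25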
a6e868803e1336d83ee8863d15896880603fc777
|
tornwamp/customize.py
|
tornwamp/customize.py
|
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
Add PublishProcessor to processors' list
|
Add PublishProcessor to processors' list
|
Python
|
apache-2.0
|
ef-ctx/tornwamp
|
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
- Code.CALL: rpc.CallProcessor
+ Code.CALL: rpc.CallProcessor,
+ Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
Add PublishProcessor to processors' list
|
## Code Before:
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
## Instruction:
Add PublishProcessor to processors' list
## Code After:
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
- Code.CALL: rpc.CallProcessor
+ Code.CALL: rpc.CallProcessor,
? +
+ Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
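Annotation: the processors dict in this record is a plain code-to-handler dispatch table. A generic sketch of how such a table is typically consumed (the function and argument names are hypothetical, not tornwamp's actual API):

def dispatch(code, message, processors):
    # Look the handler class up by message code and instantiate it on
    # the message; unknown codes surface as an explicit error.
    processor_cls = processors.get(code)
    if processor_cls is None:
        raise ValueError("no processor registered for code %r" % (code,))
    return processor_cls(message)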
b8e085c538b9eda06a831c78f55219ac4612a5da
|
model.py
|
model.py
|
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
import os
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
if not os.path.exists('xml'): os.mkdir('xml')
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
Make directory if it doesn't exist
|
Make directory if it doesn't exist
|
Python
|
apache-2.0
|
faskiri/google-drive-extract-images
|
+ import os
+
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
- def write(self):
+ def write(self):
+ if not os.path.exists('xml'): os.mkdir('xml')
+
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
Make directory if it doesn't exist
|
## Code Before:
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
## Instruction:
Make directory if it doesn't exist
## Code After:
import os
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
if not os.path.exists('xml'): os.mkdir('xml')
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
+ import os
+
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
- def write(self):
? -
+ def write(self):
+ if not os.path.exists('xml'): os.mkdir('xml')
+
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
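Annotation: the exists()/mkdir() pair added here leaves a small window in which another process can create 'xml' first, making mkdir() raise. A hedged alternative (requires Python 3.3+ for exist_ok, which the original code may well predate):

import os

def ensure_dir(path):
    # Idempotent: succeeds whether or not the directory already exists,
    # so there is no check-then-create race.
    os.makedirs(path, exist_ok=True)

ensure_dir('xml')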
4615a9e26f9a6064572d409ccf8a79a7ab584a38
|
carson/__init__.py
|
carson/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
db = SQLAlchemy(app)
from . import api
from . import models
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
app.config.from_envvar('CARSON_SETTINGS', silent=True)
db = SQLAlchemy(app)
from . import api
from . import models
|
Allow overwriting of config from envvar
|
Allow overwriting of config from envvar
|
Python
|
mit
|
SylverStudios/carson
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
+ app.config.from_envvar('CARSON_SETTINGS', silent=True)
db = SQLAlchemy(app)
from . import api
from . import models
|
Allow overwriting of config from envvar
|
## Code Before:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
db = SQLAlchemy(app)
from . import api
from . import models
## Instruction:
Allow overwriting of config from envvar
## Code After:
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
app.config.from_envvar('CARSON_SETTINGS', silent=True)
db = SQLAlchemy(app)
from . import api
from . import models
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('carson.default_settings')
+ app.config.from_envvar('CARSON_SETTINGS', silent=True)
db = SQLAlchemy(app)
from . import api
from . import models
|
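Annotation: from_envvar reads a file path out of the environment and loads that file as configuration, letting deployments override the defaults module without code changes. A runnable sketch of the override mechanics (the temp path and DEBUG key are illustrative):

import os
from flask import Flask

app = Flask(__name__)
app.config['DEBUG'] = False  # stand-in for carson.default_settings
# Pretend an operator exported CARSON_SETTINGS before starting the app:
with open('/tmp/carson_settings.py', 'w') as f:
    f.write('DEBUG = True\n')
os.environ['CARSON_SETTINGS'] = '/tmp/carson_settings.py'
# silent=True means an unset CARSON_SETTINGS is ignored instead of
# raising, so development runs work on defaults alone.
app.config.from_envvar('CARSON_SETTINGS', silent=True)
print(app.config['DEBUG'])  # True -- the envvar file won

Note also that flask.ext.sqlalchemy in this record is the long-removed extension namespace; current code imports flask_sqlalchemy directly.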
f23ee95c7b662dec71ed7fd527854a7f832e3603
|
Lib/test/test_ctypes.py
|
Lib/test/test_ctypes.py
|
import _ctypes
import ctypes
|
import unittest
from test.test_support import run_suite
import ctypes.test
def test_main():
skipped, testcases = ctypes.test.get_tests(ctypes.test, "test_*.py", verbosity=0)
suites = [unittest.makeSuite(t) for t in testcases]
run_suite(unittest.TestSuite(suites))
if __name__ == "__main__":
test_main()
|
Replace the trivial ctypes test (which only did an import) with the real test suite.
|
Replace the trivial ctypes test (which only did an import) with the real test suite.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
+ import unittest
- import _ctypes
+ from test.test_support import run_suite
- import ctypes
+ import ctypes.test
+ def test_main():
+ skipped, testcases = ctypes.test.get_tests(ctypes.test, "test_*.py", verbosity=0)
+ suites = [unittest.makeSuite(t) for t in testcases]
+ run_suite(unittest.TestSuite(suites))
+
+ if __name__ == "__main__":
+ test_main()
+
|
Replace the trivial ctypes test (did only an import) with the real test suite.
|
## Code Before:
import _ctypes
import ctypes
## Instruction:
Replace the trivial ctypes test (which only did an import) with the real test suite.
## Code After:
import unittest
from test.test_support import run_suite
import ctypes.test
def test_main():
skipped, testcases = ctypes.test.get_tests(ctypes.test, "test_*.py", verbosity=0)
suites = [unittest.makeSuite(t) for t in testcases]
run_suite(unittest.TestSuite(suites))
if __name__ == "__main__":
test_main()
|
+ import unittest
- import _ctypes
+ from test.test_support import run_suite
- import ctypes
+ import ctypes.test
? +++++
+
+ def test_main():
+ skipped, testcases = ctypes.test.get_tests(ctypes.test, "test_*.py", verbosity=0)
+ suites = [unittest.makeSuite(t) for t in testcases]
+ run_suite(unittest.TestSuite(suites))
+
+ if __name__ == "__main__":
+ test_main()
|
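Annotation: the new test_main builds one suite per TestCase class and runs them together; run_suite and ctypes.test.get_tests are CPython test-harness internals of that era. The general shape using only the stdlib (makeSuite matches the commit's vintage but was deprecated in Python 3.11; modern code uses TestLoader().loadTestsFromTestCase):

import unittest

class SmokeTest(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

def build_suite(testcases):
    # Mirrors test_main above: one sub-suite per TestCase class,
    # aggregated into a single suite for a single run.
    return unittest.TestSuite(unittest.makeSuite(t) for t in testcases)

if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(build_suite([SmokeTest]))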