commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
22558f86de3b76b3a9262ee5df3f8802b4c38f88
|
pylib/gfxprim/loaders/_extend_context.py
|
pylib/gfxprim/loaders/_extend_context.py
|
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
def Save(self, filename, format=None):
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
res = loaders_c.GP_SaveJPG(filename, self, None)
elif format == 'png':
res = loaders_c.GP_SavePNG(filename, self, None)
elif format == 'pbm':
res = loaders_c.GP_SavePBM(filename, self, None)
elif format == 'pgm':
res = loaders_c.GP_SavePGM(filename, self, None)
elif format == 'ppm':
res = loaders_c.GP_SavePPM(filename, self, None)
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
|
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
def Save(self, filename, format=None, callback=None):
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
res = loaders_c.GP_SaveJPG(self, filename, callback)
elif format == 'png':
res = loaders_c.GP_SavePNG(self, filename, callback)
elif format == 'pbm':
res = loaders_c.GP_SavePBM(filename, self, callback)
elif format == 'pgm':
res = loaders_c.GP_SavePGM(filename, self, callback)
elif format == 'ppm':
res = loaders_c.GP_SavePPM(filename, self, callback)
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
|
Fix the loaders extend context after API change.
|
pywrap: Fix the loaders extend context after API change.
|
Python
|
lgpl-2.1
|
gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim
|
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
- def Save(self, filename, format=None):
+ def Save(self, filename, format=None, callback=None):
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
- res = loaders_c.GP_SaveJPG(filename, self, None)
+ res = loaders_c.GP_SaveJPG(self, filename, callback)
elif format == 'png':
- res = loaders_c.GP_SavePNG(filename, self, None)
+ res = loaders_c.GP_SavePNG(self, filename, callback)
elif format == 'pbm':
- res = loaders_c.GP_SavePBM(filename, self, None)
+ res = loaders_c.GP_SavePBM(filename, self, callback)
elif format == 'pgm':
- res = loaders_c.GP_SavePGM(filename, self, None)
+ res = loaders_c.GP_SavePGM(filename, self, callback)
elif format == 'ppm':
- res = loaders_c.GP_SavePPM(filename, self, None)
+ res = loaders_c.GP_SavePPM(filename, self, callback)
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
|
Fix the loaders extend context after API change.
|
## Code Before:
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
def Save(self, filename, format=None):
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
res = loaders_c.GP_SaveJPG(filename, self, None)
elif format == 'png':
res = loaders_c.GP_SavePNG(filename, self, None)
elif format == 'pbm':
res = loaders_c.GP_SavePBM(filename, self, None)
elif format == 'pgm':
res = loaders_c.GP_SavePGM(filename, self, None)
elif format == 'ppm':
res = loaders_c.GP_SavePPM(filename, self, None)
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
## Instruction:
Fix the loaders extend context after API change.
## Code After:
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
def Save(self, filename, format=None, callback=None):
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
res = loaders_c.GP_SaveJPG(self, filename, callback)
elif format == 'png':
res = loaders_c.GP_SavePNG(self, filename, callback)
elif format == 'pbm':
res = loaders_c.GP_SavePBM(filename, self, callback)
elif format == 'pgm':
res = loaders_c.GP_SavePGM(filename, self, callback)
elif format == 'ppm':
res = loaders_c.GP_SavePPM(filename, self, callback)
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
|
from ..utils import extend, add_swig_getmethod, add_swig_setmethod
from . import loaders_c
def extend_context(_context):
"""
Extends _context class with loader module methods for calling convenience.
Called once on loaders module inicialization.
"""
@extend(_context, name='load')
@staticmethod
def Load(filename):
"Load image from given file, guess type."
c = loaders_c.GP_LoadImage_Wrap(filename)
return c
@extend(_context)
- def Save(self, filename, format=None):
+ def Save(self, filename, format=None, callback=None):
? +++++++++++++++
"""Save the image in given format (or guess it from the extension)
Currently, JPG, PNG and P[BGP]M are supported, but not for all
context pixel types.
"""
if not format:
format = filename.rsplit('.', 1)[-1]
format = format.lower()
if format == 'jpg':
- res = loaders_c.GP_SaveJPG(filename, self, None)
? ^^ ^^^^^^^
+ res = loaders_c.GP_SaveJPG(self, filename, callback)
? ++++++ ^^ ^^^^^
elif format == 'png':
- res = loaders_c.GP_SavePNG(filename, self, None)
? ^^ ^^^^^^^
+ res = loaders_c.GP_SavePNG(self, filename, callback)
? ++++++ ^^ ^^^^^
elif format == 'pbm':
- res = loaders_c.GP_SavePBM(filename, self, None)
? ^^^^
+ res = loaders_c.GP_SavePBM(filename, self, callback)
? ^^^^^^^^
elif format == 'pgm':
- res = loaders_c.GP_SavePGM(filename, self, None)
? ^^^^
+ res = loaders_c.GP_SavePGM(filename, self, callback)
? ^^^^^^^^
elif format == 'ppm':
- res = loaders_c.GP_SavePPM(filename, self, None)
? ^^^^
+ res = loaders_c.GP_SavePPM(filename, self, callback)
? ^^^^^^^^
else:
raise Exception("Format %r not supported.", format)
if res != 0:
raise Exception("Error saving %r (code %d)", filename, res)
|
65ef07040e8b0e34ce6dae42850789bdd8f4806a
|
cmsplugin_filer_file/models.py
|
cmsplugin_filer_file/models.py
|
from posixpath import exists
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
return exists(self.file.path)
def get_file_name(self):
return self.file.name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
|
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
return self.file.file.storage.exists(self.file.path)
def get_file_name(self):
if self.file.name in ('', None):
name = u"%s" % (self.file.original_filename,)
else:
name = u"%s" % (self.file.name,)
return name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
|
Use the file's storage to determine whether the file exists or not. The existing implementation that uses posixpath.exists only works if the storage backend is the default FileSystemStorage
|
Use the file's storage to determine whether the file exists or not.
The existing implementation that uses posixpath.exists only works if the storage backend is the default FileSystemStorage
|
Python
|
bsd-3-clause
|
nephila/cmsplugin-filer,stefanfoulis/cmsplugin-filer,centralniak/cmsplugin-filer,creimers/cmsplugin-filer,stefanfoulis/cmsplugin-filer,yvess/cmsplugin-filer,alsoicode/cmsplugin-filer,yvess/cmsplugin-filer,jrutila/cmsplugin-filer,brightinteractive/cmsplugin-filer,wlanslovenija/cmsplugin-filer,sephii/cmsplugin-filer,eliasp/cmsplugin-filer,ImaginaryLandscape/cmsplugin-filer,brightinteractive/cmsplugin-filer,divio/cmsplugin-filer,douwevandermeij/cmsplugin-filer,grigoryk/cmsplugin-filer,grigoryk/cmsplugin-filer,dreipol/cmsplugin-filer,skirsdeda/cmsplugin-filer,brightinteractive/cmsplugin-filer,nephila/cmsplugin-filer,yakky/cmsplugin-filer,isotoma/cmsplugin-filer,douwevandermeij/cmsplugin-filer,nephila/cmsplugin-filer,jrutila/cmsplugin-filer,jschneier/cmsplugin-filer,pbs/cmsplugin-filer,sephii/cmsplugin-filer,creimers/cmsplugin-filer,wlanslovenija/cmsplugin-filer,NB-Dev/cmsplugin-filer,divio/cmsplugin-filer,divio/cmsplugin-filer,yakky/cmsplugin-filer,alsoicode/cmsplugin-filer,alsoicode/cmsplugin-filer,eliasp/cmsplugin-filer,stefanfoulis/cmsplugin-filer,isotoma/cmsplugin-filer,dreipol/cmsplugin-filer,douwevandermeij/cmsplugin-filer,sephii/cmsplugin-filer,pbs/cmsplugin-filer,skirsdeda/cmsplugin-filer,centralniak/cmsplugin-filer,jschneier/cmsplugin-filer,yvess/cmsplugin-filer,wlanslovenija/cmsplugin-filer,eliasp/cmsplugin-filer,stefanfoulis/cmsplugin-filer,divio/cmsplugin-filer,creimers/cmsplugin-filer,jschneier/cmsplugin-filer,skirsdeda/cmsplugin-filer,yvess/cmsplugin-filer,pbs/cmsplugin-filer,ImaginaryLandscape/cmsplugin-filer,pbs/cmsplugin-filer,isotoma/cmsplugin-filer,NB-Dev/cmsplugin-filer
|
- from posixpath import exists
-
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
- return exists(self.file.path)
+ return self.file.file.storage.exists(self.file.path)
def get_file_name(self):
+ if self.file.name in ('', None):
+ name = u"%s" % (self.file.original_filename,)
+ else:
+ name = u"%s" % (self.file.name,)
- return self.file.name
+ return name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
|
Use the file's storage to determine whether the file exists or not. The existing implementation that uses posixpath.exists only works if the storage backend is the default FileSystemStorage
|
## Code Before:
from posixpath import exists
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
return exists(self.file.path)
def get_file_name(self):
return self.file.name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
## Instruction:
Use the file's storage to determine whether the file exists or not. The existing implementation that uses posixpath.exists only works if the storage backend is the default FileSystemStorage
## Code After:
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
return self.file.file.storage.exists(self.file.path)
def get_file_name(self):
if self.file.name in ('', None):
name = u"%s" % (self.file.original_filename,)
else:
name = u"%s" % (self.file.name,)
return name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
|
- from posixpath import exists
-
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.file import FilerFileField
from cmsplugin_filer_utils import FilerPluginManager
class FilerFile(CMSPlugin):
"""
Plugin for storing any type of file.
Default template displays download link with icon (if available) and file size.
This could be updated to use the mimetypes library to determine the type of file rather than
storing a separate icon for each different extension.
The icon search is currently performed within get_icon_url; this is probably a performance concern.
"""
title = models.CharField(_("title"), max_length=255, null=True, blank=True)
file = FilerFileField(verbose_name=_('file'))
target_blank = models.BooleanField(_('Open link in new window'), default=False)
objects = FilerPluginManager(select_related=('file',))
def get_icon_url(self):
return self.file.icons['32']
def file_exists(self):
- return exists(self.file.path)
+ return self.file.file.storage.exists(self.file.path)
? +++++++++++++++++++++++
def get_file_name(self):
+ if self.file.name in ('', None):
+ name = u"%s" % (self.file.original_filename,)
+ else:
+ name = u"%s" % (self.file.name,)
- return self.file.name
? ----------
+ return name
def get_ext(self):
return self.file.extension
def __unicode__(self):
if self.title:
return self.title
elif self.file:
# added if, because it raised attribute error when file wasnt defined
return self.get_file_name()
return "<empty>"
search_fields = ('title',)
|
1ff4dab34d4aa6935d4d1b54aa354882790b9b44
|
astroquery/astrometry_net/__init__.py
|
astroquery/astrometry_net/__init__.py
|
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
Add config items for server, timeout
|
Add config items for server, timeout
|
Python
|
bsd-3-clause
|
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
|
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
+
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
- )
+ )
+ server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
+ timeout = _config.ConfigItem(60,
+ 'Default timeout for connecting to server')
+
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
Add config items for server, timeout
|
## Code Before:
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
## Instruction:
Add config items for server, timeout
## Code After:
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
+
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
- )
+ )
+ server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
+ timeout = _config.ConfigItem(60,
+ 'Default timeout for connecting to server')
+
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
390f585994f6d021405de9aee3c174b054fb64a7
|
ietfparse/compat/parse.py
|
ietfparse/compat/parse.py
|
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
from urllib import quote, splitnport, urlencode
from urlparse import urlsplit, urlunsplit
|
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
from urllib import quote, splitnport, urlencode as _urlencode
from urlparse import urlsplit, urlunsplit
# urlencode did not encode its parameters in Python 2.x so we
# need to implement that ourselves for compatibility.
def urlencode(query, doseq=0, safe='', encoding=None, errors=None):
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'strict'
def encode_value(v):
try:
return codecs.encode(v, encoding, errors)
except UnicodeError:
raise
except (AttributeError, TypeError):
return str(v)
try:
quoted = []
for name, value in query:
quoted.append((encode_value(name), encode_value(value)))
query = quoted
except UnicodeError:
raise
except (TypeError, ValueError) as exc: # pragma no cover
# doesn't look like a sequence of tuples, maybe a dict?
try:
quoted = {}
for name, value in query.items():
quoted[encode_value(name)] = encode_value(value)
query = quoted
except AttributeError: # not a dictionary either
pass
return _urlencode(query, doseq=doseq)
|
Fix urlencode for Python < 3.0.
|
compat: Fix urlencode for Python < 3.0.
The urlencode function handles encoding in Python 3.x so our
compatibility layer should do so to.
|
Python
|
bsd-3-clause
|
dave-shawley/ietfparse
|
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
- from urllib import quote, splitnport, urlencode
+ from urllib import quote, splitnport, urlencode as _urlencode
from urlparse import urlsplit, urlunsplit
+ # urlencode did not encode its parameters in Python 2.x so we
+ # need to implement that ourselves for compatibility.
+ def urlencode(query, doseq=0, safe='', encoding=None, errors=None):
+
+ if encoding is None:
+ encoding = 'utf-8'
+ if errors is None:
+ errors = 'strict'
+
+ def encode_value(v):
+ try:
+ return codecs.encode(v, encoding, errors)
+ except UnicodeError:
+ raise
+ except (AttributeError, TypeError):
+ return str(v)
+
+ try:
+ quoted = []
+ for name, value in query:
+ quoted.append((encode_value(name), encode_value(value)))
+ query = quoted
+ except UnicodeError:
+ raise
+ except (TypeError, ValueError) as exc: # pragma no cover
+ # doesn't look like a sequence of tuples, maybe a dict?
+ try:
+ quoted = {}
+ for name, value in query.items():
+ quoted[encode_value(name)] = encode_value(value)
+ query = quoted
+ except AttributeError: # not a dictionary either
+ pass
+
+ return _urlencode(query, doseq=doseq)
+
|
Fix urlencode for Python < 3.0.
|
## Code Before:
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
from urllib import quote, splitnport, urlencode
from urlparse import urlsplit, urlunsplit
## Instruction:
Fix urlencode for Python < 3.0.
## Code After:
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
from urllib import quote, splitnport, urlencode as _urlencode
from urlparse import urlsplit, urlunsplit
# urlencode did not encode its parameters in Python 2.x so we
# need to implement that ourselves for compatibility.
def urlencode(query, doseq=0, safe='', encoding=None, errors=None):
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'strict'
def encode_value(v):
try:
return codecs.encode(v, encoding, errors)
except UnicodeError:
raise
except (AttributeError, TypeError):
return str(v)
try:
quoted = []
for name, value in query:
quoted.append((encode_value(name), encode_value(value)))
query = quoted
except UnicodeError:
raise
except (TypeError, ValueError) as exc: # pragma no cover
# doesn't look like a sequence of tuples, maybe a dict?
try:
quoted = {}
for name, value in query.items():
quoted[encode_value(name)] = encode_value(value)
query = quoted
except AttributeError: # not a dictionary either
pass
return _urlencode(query, doseq=doseq)
|
__all__ = (
'quote',
'splitnport',
'urlencode',
'urlsplit',
'urlunsplit',
)
try:
from urllib.parse import (
quote, splitnport, urlencode, urlsplit, urlunsplit)
except ImportError:
- from urllib import quote, splitnport, urlencode
+ from urllib import quote, splitnport, urlencode as _urlencode
? ++++++++++++++
from urlparse import urlsplit, urlunsplit
+
+ # urlencode did not encode its parameters in Python 2.x so we
+ # need to implement that ourselves for compatibility.
+ def urlencode(query, doseq=0, safe='', encoding=None, errors=None):
+
+ if encoding is None:
+ encoding = 'utf-8'
+ if errors is None:
+ errors = 'strict'
+
+ def encode_value(v):
+ try:
+ return codecs.encode(v, encoding, errors)
+ except UnicodeError:
+ raise
+ except (AttributeError, TypeError):
+ return str(v)
+
+ try:
+ quoted = []
+ for name, value in query:
+ quoted.append((encode_value(name), encode_value(value)))
+ query = quoted
+ except UnicodeError:
+ raise
+ except (TypeError, ValueError) as exc: # pragma no cover
+ # doesn't look like a sequence of tuples, maybe a dict?
+ try:
+ quoted = {}
+ for name, value in query.items():
+ quoted[encode_value(name)] = encode_value(value)
+ query = quoted
+ except AttributeError: # not a dictionary either
+ pass
+
+ return _urlencode(query, doseq=doseq)
|
cd9a51ab2fe6b99c0665b8f499363a4d557b4a4d
|
DataWrangling/CaseStudy/sample_file.py
|
DataWrangling/CaseStudy/sample_file.py
|
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
|
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
if __name__ == '__main__':
main()
|
Modify script which split your region in smaller sample
|
feat: Modify script which split your region in smaller sample
|
Python
|
mit
|
aguijarro/DataSciencePython
|
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
+
+ if __name__ == '__main__':
+ main()
|
Modify script which split your region in smaller sample
|
## Code Before:
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
## Instruction:
Modify script which split your region in smaller sample
## Code After:
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
if __name__ == '__main__':
main()
|
import xml.etree.ElementTree as ET # Use cElementTree or lxml if too slow
import os
OSM_FILE = "san-francisco-bay_california.osm" # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"
k = 20 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
"""Yield element if it is the right type of tag
Reference:
http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
"""
context = iter(ET.iterparse(osm_file, events=('start', 'end')))
_, root = next(context)
for event, elem in context:
if event == 'end' and elem.tag in tags:
yield elem
root.clear()
def main():
os.chdir('./data')
with open(SAMPLE_FILE, 'wb') as output:
output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
output.write('<osm>\n ')
# Write every kth top level element
for i, element in enumerate(get_element(OSM_FILE)):
if i % k == 0:
output.write(ET.tostring(element, encoding='utf-8'))
output.write('</osm>')
+
+ if __name__ == '__main__':
+ main()
|
afbe8ddff1791084aa1bcad775f1b01481b72c2b
|
larvae/person.py
|
larvae/person.py
|
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
for k, v in kwargs.items():
setattr(self, k, v)
self.links = []
self.other_names = []
self.extras = {}
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
|
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
self.links = []
self.other_names = []
self.extras = {}
for k, v in kwargs.items():
setattr(self, k, v)
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
|
Move default value assignments before kwargs
|
Move default value assignments before kwargs
|
Python
|
bsd-3-clause
|
AGarrow/larvae
|
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
- for k, v in kwargs.items():
- setattr(self, k, v)
-
self.links = []
self.other_names = []
self.extras = {}
+ for k, v in kwargs.items():
+ setattr(self, k, v)
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
|
Move default value assignments before kwargs
|
## Code Before:
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
for k, v in kwargs.items():
setattr(self, k, v)
self.links = []
self.other_names = []
self.extras = {}
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
## Instruction:
Move default value assignments before kwargs
## Code After:
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
self.links = []
self.other_names = []
self.extras = {}
for k, v in kwargs.items():
setattr(self, k, v)
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
|
from larvae.base import LarvaeBase
class Person(LarvaeBase):
"""
Details for a Person in Popolo format.
"""
_schema_name = "person"
__slots__ = ('name', '_id', 'gender', 'birth_date',
'death_date', 'image', 'summary', 'biography', 'links',
'other_names', 'extras', 'contact_details', 'openstates_id',
'chamber', 'district')
_other_name_slots = ('name', 'start_date', 'end_date', 'note')
def __init__(self, name, **kwargs):
super(Person, self).__init__()
self.name = name
- for k, v in kwargs.items():
- setattr(self, k, v)
-
self.links = []
self.other_names = []
self.extras = {}
+ for k, v in kwargs.items():
+ setattr(self, k, v)
def add_name(self, name, **kwargs):
other_name = {'name': name}
for k, v in kwargs.items():
if k not in self._other_name_slots:
raise AttributeError('{0} not a valid kwarg for add_name'
.format(k))
other_name[k] = v
self.other_names.append(other_name)
def add_link(self, url, note):
self.links.append({"note": note, "url": url})
def __unicode__(self):
return self.name
__str__ = __unicode__
|
a15c8bce9c59dcba3e7143903d95feb85ee7abe5
|
tests/ex12_tests.py
|
tests/ex12_tests.py
|
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
|
from nose.tools import *
from exercises import ex12
try:
from io import StringIO
except:
from StringIO import StringIO
import sys
def test_histogram():
'''
Test our histogram output is correct
'''
std_out = sys.stdout
result = StringIO()
sys.stdout = result
test_histogram = ex12.histogram([1, 2, 3])
sys.stdout = std_out
result_string = result.getvalue()
assert_equal(result_string, '*\n**\n***\n')
|
Update ex12 test so it actually reads output.
|
Update ex12 test so it actually reads output.
|
Python
|
mit
|
gravyboat/python-exercises
|
from nose.tools import *
from exercises import ex12
+ try:
+ from io import StringIO
+ except:
+ from StringIO import StringIO
+ import sys
def test_histogram():
'''
Test our histogram output is correct
'''
+ std_out = sys.stdout
+ result = StringIO()
+ sys.stdout = result
+
test_histogram = ex12.histogram([1, 2, 3])
+ sys.stdout = std_out
+
+ result_string = result.getvalue()
+
- # assert_equal(test_histogram, '*\n**\n***\n')
+ assert_equal(result_string, '*\n**\n***\n')
|
Update ex12 test so it actually reads output.
|
## Code Before:
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
## Instruction:
Update ex12 test so it actually reads output.
## Code After:
from nose.tools import *
from exercises import ex12
try:
from io import StringIO
except:
from StringIO import StringIO
import sys
def test_histogram():
'''
Test our histogram output is correct
'''
std_out = sys.stdout
result = StringIO()
sys.stdout = result
test_histogram = ex12.histogram([1, 2, 3])
sys.stdout = std_out
result_string = result.getvalue()
assert_equal(result_string, '*\n**\n***\n')
|
from nose.tools import *
from exercises import ex12
+ try:
+ from io import StringIO
+ except:
+ from StringIO import StringIO
+ import sys
def test_histogram():
'''
Test our histogram output is correct
'''
+ std_out = sys.stdout
+ result = StringIO()
+ sys.stdout = result
+
test_histogram = ex12.histogram([1, 2, 3])
- # assert_equal(test_histogram, '*\n**\n***\n')
+ sys.stdout = std_out
+ result_string = result.getvalue()
+
+ assert_equal(result_string, '*\n**\n***\n')
+
|
7784186509e41c72bcf7a4ebbd9b268b49449d35
|
user_clipboard/urls.py
|
user_clipboard/urls.py
|
from django.conf.urls import patterns, url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
urlpatterns = patterns(
'',
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
)
|
from django.conf.urls import url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
urlpatterns = [
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
]
|
Define urlpatterns as a pure list (don't call patterns)
|
Define urlpatterns as a pure list (don't call patterns)
|
Python
|
mit
|
MagicSolutions/django-user-clipboard,IndustriaTech/django-user-clipboard,MagicSolutions/django-user-clipboard,IndustriaTech/django-user-clipboard
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
+ urlpatterns = [
- urlpatterns = patterns(
- '',
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
- )
+ ]
|
Define urlpatterns as a pure list (don't call patterns)
|
## Code Before:
from django.conf.urls import patterns, url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
urlpatterns = patterns(
'',
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
)
## Instruction:
Define urlpatterns as a pure list (don't call patterns)
## Code After:
from django.conf.urls import url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
urlpatterns = [
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
]
|
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
from .views import ClipboardFileAPIView, ClipboardImageAPIView
+ urlpatterns = [
- urlpatterns = patterns(
- '',
url(r'^images/(?P<pk>\d+)$', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^images/', ClipboardImageAPIView.as_view(), name="clipboard_images"),
url(r'^(?P<pk>\d+)$', ClipboardFileAPIView.as_view(), name="clipboard"),
url(r'^', ClipboardFileAPIView.as_view(), name="clipboard"),
- )
+ ]
|
1af3cc43ae482549ee058e801b4f65e2af78653c
|
grow/testing/testdata/pod/extensions/preprocessors.py
|
grow/testing/testdata/pod/extensions/preprocessors.py
|
from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
Update preprocessor testdata to use grow.Preprocessor.
|
Update preprocessor testdata to use grow.Preprocessor.
|
Python
|
mit
|
grow/pygrow,denmojo/pygrow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow,denmojo/pygrow,grow/grow,denmojo/pygrow,grow/grow,grow/pygrow
|
- from grow import Preprocessor
+ import grow
from protorpc import messages
- class CustomPreprocessor(Preprocessor):
+ class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
Update preprocessor testdata to use grow.Preprocessor.
|
## Code Before:
from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
## Instruction:
Update preprocessor testdata to use grow.Preprocessor.
## Code After:
import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
- from grow import Preprocessor
+ import grow
from protorpc import messages
- class CustomPreprocessor(Preprocessor):
+ class CustomPreprocessor(grow.Preprocessor):
? +++++
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
35fb8c91bac3d68d255223b20dbbfd84ab34b3b1
|
quant/ichimoku/ichimoku_test.py
|
quant/ichimoku/ichimoku_test.py
|
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
ndays_ago=today-timedelta(duration)
#print(str(n)+" days ago:\n"+str(ndays_ago))
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
plot_data(df,ichimoku(df['close']),title=stock)
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
|
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
MAX_ROLLING = 100
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
ndays_ago=today-timedelta(duration+MAX_ROLLING)
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
plot_data(df[MAX_ROLLING:],ichimoku(df['close'])[MAX_ROLLING:],title=stock)
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
|
Add the missing data for ichimoku with additional data fed
|
Add the missing data for ichimoku with additional data fed
|
Python
|
apache-2.0
|
yunfeiz/py_learnt
|
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
-
+ MAX_ROLLING = 100
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
- ndays_ago=today-timedelta(duration)
+ ndays_ago=today-timedelta(duration+MAX_ROLLING)
- #print(str(n)+" days ago:\n"+str(ndays_ago))
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
- plot_data(df,ichimoku(df['close']),title=stock)
+ plot_data(df[MAX_ROLLING:],ichimoku(df['close'])[MAX_ROLLING:],title=stock)
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
+
|
Add the missing data for ichimoku with additional data fed
|
## Code Before:
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
ndays_ago=today-timedelta(duration)
#print(str(n)+" days ago:\n"+str(ndays_ago))
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
plot_data(df,ichimoku(df['close']),title=stock)
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
## Instruction:
Add the missing data for ichimoku with additional data fed
## Code After:
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
MAX_ROLLING = 100
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
ndays_ago=today-timedelta(duration+MAX_ROLLING)
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
plot_data(df[MAX_ROLLING:],ichimoku(df['close'])[MAX_ROLLING:],title=stock)
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
|
import pandas as pd
import numpy as np
import os
#from ta import ichimoku
from util import get_data, plot_data
from pandas import DataFrame, Series
from technical_analysis import ichimoku
from datetime import datetime, timedelta,date
import os
import time
import sys
import getopt,argparse
-
+ MAX_ROLLING = 100
def test_run(stock='000725'):
duration = 360
#now=datetime.now()
today=date.today()
- ndays_ago=today-timedelta(duration)
+ ndays_ago=today-timedelta(duration+MAX_ROLLING)
? ++++++++++++
- #print(str(n)+" days ago:\n"+str(ndays_ago))
start_date=str(ndays_ago)
end_date =str(today)
df = get_data(stock,start_date, end_date)
- plot_data(df,ichimoku(df['close']),title=stock)
+ plot_data(df[MAX_ROLLING:],ichimoku(df['close'])[MAX_ROLLING:],title=stock)
? ++++++++++++++ ++++++++++++++
def usage():
print (sys.argv[0] + ' -s stock id')
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], "s:")
stock_list=''
single_stock=False
stock_selected="002281"
for op, value in opts:
if op == '-s':
stock_selected = value
elif op == '-h':
usage()
sys.exit()
test_run(stock_selected)
|
052042e2f48b7936a6057c18a128f497d5e5b1a4
|
folium/__init__.py
|
folium/__init__.py
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|
Make features accessible from root
|
Make features accessible from root
|
Python
|
mit
|
QuLogic/folium,talespaiva/folium,andrewgiessel/folium,themiurgo/folium,shankari/folium,python-visualization/folium,talespaiva/folium,QuLogic/folium,BibMartin/folium,ocefpaf/folium,themiurgo/folium,talespaiva/folium,andrewgiessel/folium,BibMartin/folium,ocefpaf/folium,python-visualization/folium,shankari/folium,shankari/folium,themiurgo/folium,BibMartin/folium,andrewgiessel/folium,talespaiva/folium,QuLogic/folium
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
- from folium.folium import Map, initialize_notebook
+ from folium.folium import Map, initialize_notebook, CircleMarker
+ from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
+
+ from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
+ ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
+ RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
+
|
Make features accessible from root
|
## Code Before:
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
## Instruction:
Make features accessible from root
## Code After:
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
- from folium.folium import Map, initialize_notebook
+ from folium.folium import Map, initialize_notebook, CircleMarker
? ++++++++++++++
+
+ from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
+
+ from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
+ ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
+ RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|
7a8a2556bbeb255c991aa5a39aa04b4fed238a7b
|
kolibri/plugins/setup_wizard/middleware.py
|
kolibri/plugins/setup_wizard/middleware.py
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
Add 'session-list' to constants list.
|
Add 'session-list' to constants list.
|
Python
|
mit
|
DXCanas/kolibri,christianmemije/kolibri,learningequality/kolibri,jonboiser/kolibri,jonboiser/kolibri,rtibbles/kolibri,aronasorman/kolibri,learningequality/kolibri,rtibbles/kolibri,christianmemije/kolibri,jayoshih/kolibri,jayoshih/kolibri,jayoshih/kolibri,learningequality/kolibri,christianmemije/kolibri,DXCanas/kolibri,learningequality/kolibri,lyw07/kolibri,benjaoming/kolibri,lyw07/kolibri,jonboiser/kolibri,DXCanas/kolibri,MingDai/kolibri,lyw07/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,jonboiser/kolibri,indirectlylit/kolibri,aronasorman/kolibri,benjaoming/kolibri,benjaoming/kolibri,jayoshih/kolibri,DXCanas/kolibri,MingDai/kolibri,mrpau/kolibri,rtibbles/kolibri,benjaoming/kolibri,lyw07/kolibri,aronasorman/kolibri,mrpau/kolibri,MingDai/kolibri,MingDai/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,christianmemije/kolibri,indirectlylit/kolibri,aronasorman/kolibri
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
+ "session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
-
- # manually add session bootstrapping api call to allowed path list
- allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
Add 'session-list' to constants list.
|
## Code Before:
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
## Instruction:
Add 'session-list' to constants list.
## Code After:
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
+ "session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
-
- # manually add session bootstrapping api call to allowed path list
- allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
d2b0aba3e13246193f37758e23f4d26b90552508
|
social_auth/middleware.py
|
social_auth/middleware.py
|
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
backend_name = exception.backend.AUTH_BACKEND.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
|
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
backend_name = exception.backend.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
|
Correct access of backend name from AuthException
|
Correct access of backend name from AuthException
|
Python
|
bsd-3-clause
|
omab/django-social-auth,duoduo369/django-social-auth,MjAbuz/django-social-auth,beswarm/django-social-auth,getsentry/django-social-auth,omab/django-social-auth,lovehhf/django-social-auth,sk7/django-social-auth,WW-Digital/django-social-auth,qas612820704/django-social-auth,caktus/django-social-auth,gustavoam/django-social-auth,mayankcu/Django-social,vuchau/django-social-auth,caktus/django-social-auth,limdauto/django-social-auth,vuchau/django-social-auth,VishvajitP/django-social-auth,qas612820704/django-social-auth,dongguangming/django-social-auth,VishvajitP/django-social-auth,adw0rd/django-social-auth,MjAbuz/django-social-auth,dongguangming/django-social-auth,gustavoam/django-social-auth,krvss/django-social-auth,lovehhf/django-social-auth,michael-borisov/django-social-auth,michael-borisov/django-social-auth,vxvinh1511/django-social-auth,beswarm/django-social-auth,limdauto/django-social-auth,vxvinh1511/django-social-auth
|
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
- backend_name = exception.backend.AUTH_BACKEND.name
+ backend_name = exception.backend.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
|
Correct access of backend name from AuthException
|
## Code Before:
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
backend_name = exception.backend.AUTH_BACKEND.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
## Instruction:
Correct access of backend name from AuthException
## Code After:
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
backend_name = exception.backend.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
|
from django.conf import settings
from django.contrib import messages
from django.shortcuts import redirect
from social_auth.backends.exceptions import AuthException
class SocialAuthExceptionMiddleware(object):
"""Middleware that handles Social Auth AuthExceptions by providing the user
with a message, logging an error, and redirecting to some next location.
By default, the exception message itself is sent to the user and they are
redirected to the location specified in the LOGIN_ERROR_URL setting.
This middleware can be extended by overriding the get_message or
get_redirect_uri methods, which each accept request and exception.
"""
def process_exception(self, request, exception):
if isinstance(exception, AuthException):
- backend_name = exception.backend.AUTH_BACKEND.name
? -------------
+ backend_name = exception.backend.name
message = self.get_message(request, exception)
messages.error(request, message,
extra_tags=u'social-auth {0}'.format(backend_name))
url = self.get_redirect_uri(request, exception)
return redirect(url)
def get_message(self, request, exception):
return unicode(exception)
def get_redirect_uri(self, request, exception):
return settings.LOGIN_ERROR_URL
|
6d83f2150f7c6177385b9f2d8abbe48cd2979130
|
events/admin.py
|
events/admin.py
|
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
list_display = ('calendar','month','data_cached_on')
|
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
list_display = ('calendar','month','data_cached_on','is_cache_stale')
|
Add staleness to MonthCache Admin display
|
Add staleness to MonthCache Admin display
|
Python
|
mit
|
Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters
|
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
- list_display = ('calendar','month','data_cached_on')
+ list_display = ('calendar','month','data_cached_on','is_cache_stale')
|
Add staleness to MonthCache Admin display
|
## Code Before:
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
list_display = ('calendar','month','data_cached_on')
## Instruction:
Add staleness to MonthCache Admin display
## Code After:
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
list_display = ('calendar','month','data_cached_on','is_cache_stale')
|
from django.contrib import admin
from .models import Calendar,MonthCache
# Register your models here.
@admin.register(Calendar)
class CalendarAdmin(admin.ModelAdmin):
list_display = ('name','remote_id','css_class')
@admin.register(MonthCache)
class MonthCacheAdmin(admin.ModelAdmin):
- list_display = ('calendar','month','data_cached_on')
+ list_display = ('calendar','month','data_cached_on','is_cache_stale')
? +++++++++++++++++
|
407a032acb307e5f936437aec4975ef69133d0c5
|
DisplayAdapter/testing/test_display_adapter/test_display_driver/test_display_drivers.py
|
DisplayAdapter/testing/test_display_adapter/test_display_driver/test_display_drivers.py
|
from mock import patch
from display_adapter.display_driver.display_drivers import DisplayDriver
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
|
from mock import patch
from datetime import datetime
from display_adapter.display_driver.display_drivers import minutify, DisplayDriver
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
def test_minutify():
"""
This function tests the functionality of the minutify function linked to the Display Driver. The expected result
of this test is for a datetime object to be made accurate to the minute.
"""
dt = datetime.now().replace(second=30, microsecond=40000)
accurate_dt = minutify(dt)
# Assert the datetime object has been minutified correctly (seconds and microseconds == 0)
assert accurate_dt.second == 0 and accurate_dt.microsecond == 0
|
Test functionality has been added. Paired by Richard and Michael.
|
Test functionality has been added. Paired by Richard and Michael.
The functionality for the minutify function has now been tested; and the tests work (Support 231)
|
Python
|
mit
|
CO600GOL/Game_of_life,CO600GOL/Game_of_life,CO600GOL/Game_of_life
|
from mock import patch
+ from datetime import datetime
- from display_adapter.display_driver.display_drivers import DisplayDriver
+ from display_adapter.display_driver.display_drivers import minutify, DisplayDriver
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
+
+
+ def test_minutify():
+ """
+ This function tests the functionality of the minutify function linked to the Display Driver. The expected result
+ of this test is for a datetime object to be made accurate to the minute.
+ """
+
+ dt = datetime.now().replace(second=30, microsecond=40000)
+ accurate_dt = minutify(dt)
+ # Assert the datetime object has been minutified correctly (seconds and microseconds == 0)
+ assert accurate_dt.second == 0 and accurate_dt.microsecond == 0
+
|
Test functionality has been added. Paired by Richard and Michael.
|
## Code Before:
from mock import patch
from display_adapter.display_driver.display_drivers import DisplayDriver
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
## Instruction:
Test functionality has been added. Paired by Richard and Michael.
## Code After:
from mock import patch
from datetime import datetime
from display_adapter.display_driver.display_drivers import minutify, DisplayDriver
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
def test_minutify():
"""
This function tests the functionality of the minutify function linked to the Display Driver. The expected result
of this test is for a datetime object to be made accurate to the minute.
"""
dt = datetime.now().replace(second=30, microsecond=40000)
accurate_dt = minutify(dt)
# Assert the datetime object has been minutified correctly (seconds and microseconds == 0)
assert accurate_dt.second == 0 and accurate_dt.microsecond == 0
|
from mock import patch
+ from datetime import datetime
- from display_adapter.display_driver.display_drivers import DisplayDriver
+ from display_adapter.display_driver.display_drivers import minutify, DisplayDriver
? ++++++++++
class TestDisplayDriver(object):
"""
This class tests the functionality of the DisplayDriver class, ensuring that it can correctly connect the raspberry
pi to the display.
"""
@patch("serial.Serial")
def test_init(self, dc_mock):
"""
This method tests initialisation of the display driver, ensuring it has correctly set up the database helper
and the display controller.
"""
dd = DisplayDriver()
assert dd
assert hasattr(dd, "_db_helper")
assert hasattr(dd, "_display_controller")
+
+
+ def test_minutify():
+ """
+ This function tests the functionality of the minutify function linked to the Display Driver. The expected result
+ of this test is for a datetime object to be made accurate to the minute.
+ """
+
+ dt = datetime.now().replace(second=30, microsecond=40000)
+ accurate_dt = minutify(dt)
+ # Assert the datetime object has been minutified correctly (seconds and microseconds == 0)
+ assert accurate_dt.second == 0 and accurate_dt.microsecond == 0
|
539f78c8ea4ca1692ae27a2d0bdc01004b5ad471
|
examples/plot_humidity.py
|
examples/plot_humidity.py
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
Modify example to make camera vs. outside humidity plot
|
Modify example to make camera vs. outside humidity plot
|
Python
|
mit
|
fact-project/aux2mongodb
|
import matplotlib.pyplot as plt
- from aux2mongodb import MagicWeather
+ from aux2mongodb import MagicWeather, PfMini
- from datetime import date
+ import pandas as pd
+ from tqdm import tqdm
+ import datetime
+
+ plt.style.use('ggplot')
- m = MagicWeather(auxdir='/fact/aux')
+ magic_weather = MagicWeather(auxdir='/fact/aux')
+ pf_mini = PfMini(auxdir='/fact/aux')
+ dates = pd.date_range('2015-10-20', datetime.date.today())
- df = m.read_date(date(2015, 12, 31))
+ outside = pd.DataFrame()
+ camera = pd.DataFrame()
+ for d in tqdm(dates):
+ try:
+ outside = outside.append(magic_weather.read_date(d), ignore_index=True)
+ except FileNotFoundError:
+ continue
+ try:
+ camera = camera.append(pf_mini.read_date(d), ignore_index=True)
+ except FileNotFoundError:
+ continue
- df.plot(x='timestamp', y='humidity', legend=False)
+ outside.set_index('timestamp', inplace=True)
+ camera.set_index('timestamp', inplace=True)
+ outside = outside.resample('24h').mean()
+ camera = camera.resample('24h').mean()
+
+ fig, ax = plt.subplots()
+ ax.set_title('Camera vs. Outside Humidity (24h mean)')
+
+ outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
+ camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
+
+ ax.legend()
- plt.ylabel('Humidity / %')
+ ax.set_ylabel('Humidity / %')
+ fig.tight_layout()
plt.show()
|
Modify example to make camera vs. outside humidity plot
|
## Code Before:
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
## Instruction:
Modify example to make camera vs. outside humidity plot
## Code After:
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
import matplotlib.pyplot as plt
- from aux2mongodb import MagicWeather
+ from aux2mongodb import MagicWeather, PfMini
? ++++++++
- from datetime import date
+ import pandas as pd
+ from tqdm import tqdm
+ import datetime
+
+ plt.style.use('ggplot')
- m = MagicWeather(auxdir='/fact/aux')
+ magic_weather = MagicWeather(auxdir='/fact/aux')
? ++++++++++++
+ pf_mini = PfMini(auxdir='/fact/aux')
+ dates = pd.date_range('2015-10-20', datetime.date.today())
- df = m.read_date(date(2015, 12, 31))
+ outside = pd.DataFrame()
+ camera = pd.DataFrame()
+ for d in tqdm(dates):
+ try:
+ outside = outside.append(magic_weather.read_date(d), ignore_index=True)
+ except FileNotFoundError:
+ continue
+ try:
+ camera = camera.append(pf_mini.read_date(d), ignore_index=True)
+ except FileNotFoundError:
+ continue
- df.plot(x='timestamp', y='humidity', legend=False)
+ outside.set_index('timestamp', inplace=True)
+ camera.set_index('timestamp', inplace=True)
+ outside = outside.resample('24h').mean()
+ camera = camera.resample('24h').mean()
+
+ fig, ax = plt.subplots()
+ ax.set_title('Camera vs. Outside Humidity (24h mean)')
+
+ outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
+ camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
+
+ ax.legend()
- plt.ylabel('Humidity / %')
? ^^ ^
+ ax.set_ylabel('Humidity / %')
? ^^^^^ ^
+ fig.tight_layout()
plt.show()
|
1c736a5f48b2deb9732c65a5dec7ea47e542f6f4
|
thinc/neural/_classes/resnet.py
|
thinc/neural/_classes/resnet.py
|
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
Y = self._layers[0](X)
if isinstance(X, list) or isinstance(X, tuple):
return [X[i]+Y[i] for i in range(len(X))]
else:
return X + Y
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
if isinstance(X, list) or isinstance(X, tuple):
output = [X[i]+y[i] for i in range(len(X))]
else:
output = X+y
def residual_bwd(d_output, sgd=None):
dX = bp_y(d_output, sgd)
if isinstance(d_output, list) or isinstance(d_output, tuple):
return [d_output[i]+dX[i] for i in range(len(d_output))]
else:
return d_output + dX
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
Make residual connections work for list-valued inputs
|
Make residual connections work for list-valued inputs
|
Python
|
mit
|
spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
- return X + self._layers[0](X)
+ Y = self._layers[0](X)
+ if isinstance(X, list) or isinstance(X, tuple):
+ return [X[i]+Y[i] for i in range(len(X))]
+ else:
+ return X + Y
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
+ if isinstance(X, list) or isinstance(X, tuple):
+ output = [X[i]+y[i] for i in range(len(X))]
+ else:
- output = X+y
+ output = X+y
def residual_bwd(d_output, sgd=None):
- return d_output + bp_y(d_output, sgd)
+ dX = bp_y(d_output, sgd)
+ if isinstance(d_output, list) or isinstance(d_output, tuple):
+ return [d_output[i]+dX[i] for i in range(len(d_output))]
+ else:
+ return d_output + dX
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
Make residual connections work for list-valued inputs
|
## Code Before:
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
## Instruction:
Make residual connections work for list-valued inputs
## Code After:
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
Y = self._layers[0](X)
if isinstance(X, list) or isinstance(X, tuple):
return [X[i]+Y[i] for i in range(len(X))]
else:
return X + Y
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
if isinstance(X, list) or isinstance(X, tuple):
output = [X[i]+y[i] for i in range(len(X))]
else:
output = X+y
def residual_bwd(d_output, sgd=None):
dX = bp_y(d_output, sgd)
if isinstance(d_output, list) or isinstance(d_output, tuple):
return [d_output[i]+dX[i] for i in range(len(d_output))]
else:
return d_output + dX
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
from .model import Model
from ...api import layerize
from .affine import Affine
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
- return X + self._layers[0](X)
? ^^^^^^ ^^^
+ Y = self._layers[0](X)
? ^ ^
+ if isinstance(X, list) or isinstance(X, tuple):
+ return [X[i]+Y[i] for i in range(len(X))]
+ else:
+ return X + Y
def begin_update(self, X, drop=0.):
y, bp_y = self._layers[0].begin_update(X, drop=drop)
+ if isinstance(X, list) or isinstance(X, tuple):
+ output = [X[i]+y[i] for i in range(len(X))]
+ else:
- output = X+y
+ output = X+y
? ++++
def residual_bwd(d_output, sgd=None):
- return d_output + bp_y(d_output, sgd)
? ^^^^^^ ^^^^^^^^^^
+ dX = bp_y(d_output, sgd)
? ^^ ^
+ if isinstance(d_output, list) or isinstance(d_output, tuple):
+ return [d_output[i]+dX[i] for i in range(len(d_output))]
+ else:
+ return d_output + dX
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
52c8ee184cc0071187c1915c4f3e6f287f3faa81
|
config/__init__.py
|
config/__init__.py
|
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
|
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
# Make sure use_testing() is not detected as a unit test by nose
use_testing.__test__ = False
|
Make sure use_testing() is not detected as a unit test by nose
|
config: Make sure use_testing() is not detected as a unit test by nose
|
Python
|
agpl-3.0
|
shadowoneau/skylines,kerel-fs/skylines,Turbo87/skylines,kerel-fs/skylines,snip/skylines,Harry-R/skylines,snip/skylines,RBE-Avionik/skylines,Harry-R/skylines,Turbo87/skylines,RBE-Avionik/skylines,TobiasLohner/SkyLines,skylines-project/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,skylines-project/skylines,Harry-R/skylines,TobiasLohner/SkyLines,shadowoneau/skylines,kerel-fs/skylines,RBE-Avionik/skylines,TobiasLohner/SkyLines,shadowoneau/skylines,RBE-Avionik/skylines,shadowoneau/skylines,skylines-project/skylines,Turbo87/skylines,snip/skylines
|
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
+ # Make sure use_testing() is not detected as a unit test by nose
+ use_testing.__test__ = False
+
|
Make sure use_testing() is not detected as a unit test by nose
|
## Code Before:
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
## Instruction:
Make sure use_testing() is not detected as a unit test by nose
## Code After:
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
# Make sure use_testing() is not detected as a unit test by nose
use_testing.__test__ = False
|
import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
PRO_CONF_PATH = '/etc/skylines/production.py'
DEV_CONF_PATH = os.path.join(BASE_PATH, 'default.py')
TESTING_CONF_PATH = os.path.join(BASE_PATH, 'testing.py')
def to_envvar(path=None):
"""
Loads the application configuration from a file.
Returns the configuration or None if no configuration could be found.
"""
if path:
path = os.path.abspath(path)
if not os.path.exists(path):
return
elif os.path.exists(PRO_CONF_PATH):
path = PRO_CONF_PATH
elif os.path.exists(DEV_CONF_PATH):
path = DEV_CONF_PATH
else:
return
os.environ['SKYLINES_CONFIG'] = path
return True
def use_testing():
os.environ['SKYLINES_CONFIG'] = TESTING_CONF_PATH
+
+ # Make sure use_testing() is not detected as a unit test by nose
+ use_testing.__test__ = False
|
50510c800e7510b0f918553f0c479a10b3a72deb
|
projections/simpleexpr.py
|
projections/simpleexpr.py
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
arrays = filter(lambda v: isinstance(v, np.ndarray), df.values())
res = ma.masked_array(np.full(tuple(arrays)[0].shape, res,
dtype=np.float32))
return res
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
res = ma.masked_array(np.full(tuple(df.values())[0].shape, res,
dtype=np.float32))
return res
|
Revert "Improve determination of array shape for constant expressions"
|
Revert "Improve determination of array shape for constant expressions"
This reverts commit c8c9c42f14c742c6fcb180b7a3cc1bab1655ac46.
|
Python
|
apache-2.0
|
ricardog/raster-project,ricardog/raster-project,ricardog/raster-project,ricardog/raster-project,ricardog/raster-project
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
- arrays = filter(lambda v: isinstance(v, np.ndarray), df.values())
- res = ma.masked_array(np.full(tuple(arrays)[0].shape, res,
+ res = ma.masked_array(np.full(tuple(df.values())[0].shape, res,
dtype=np.float32))
return res
|
Revert "Improve determination of array shape for constant expressions"
|
## Code Before:
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
arrays = filter(lambda v: isinstance(v, np.ndarray), df.values())
res = ma.masked_array(np.full(tuple(arrays)[0].shape, res,
dtype=np.float32))
return res
## Instruction:
Revert "Improve determination of array shape for constant expressions"
## Code After:
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
res = ma.masked_array(np.full(tuple(df.values())[0].shape, res,
dtype=np.float32))
return res
|
import numpy as np
import numpy.ma as ma
import projections.r2py.reval as reval
import projections.r2py.rparser as rparser
class SimpleExpr():
def __init__(self, name, expr):
self.name = name
self.tree = reval.make_inputs(rparser.parse(expr))
lokals = {}
exec(reval.to_py(self.tree, name), lokals)
self.func = lokals[name + '_st']
@property
def syms(self):
return reval.find_inputs(self.tree)
def eval(self, df):
try:
res = self.func(df)
except KeyError as e:
print("Error: input '%s' not defined" % e)
raise e
if not isinstance(res, np.ndarray):
- arrays = filter(lambda v: isinstance(v, np.ndarray), df.values())
- res = ma.masked_array(np.full(tuple(arrays)[0].shape, res,
? ^^^^
+ res = ma.masked_array(np.full(tuple(df.values())[0].shape, res,
? ++++ ^^^ ++
dtype=np.float32))
return res
|
9771428d7b0c4a2c0fe057e1030024b13344ccc7
|
moa/device/__init__.py
|
moa/device/__init__.py
|
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
|
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
|
Clean the base device class.
|
Clean the base device class.
|
Python
|
mit
|
matham/moa
|
- from moa.threading import CallbackQueue
from moa.base import MoaBase
- from kivy.properties import BooleanProperty
- from kivy.clock import Clock
- try:
- from Queue import Queue
- except ImportError:
- from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
- __events__ = ('on_restart', )
-
- active = BooleanProperty(False)
-
- _kivy_eventloop_queue = None
-
- def __init__(self, allow_async=True, **kwargs):
- super(Device, self).__init__(**kwargs)
-
- if allow_async:
- trigger = Clock.create_trigger(self._do_queue)
- self._kivy_eventloop_queue = CallbackQueue(trigger)
-
- def __del__(self):
- self.deinit()
-
- def _do_queue(self, *largs, **kwargs):
- while 1:
- try:
- key, val = self._kivy_eventloop_queue.get()
- except Queue.Empty:
- return
- if key == 'set':
- setattr(*val)
- elif key == 'call':
- f, l, kw = val
- f(*l, **kw)
-
- def init(self, **kwargs):
+ def activate(self, **kwargs):
pass
- def restart(self, **kwargs):
+ def recover(self, **kwargs):
pass
- def on_restart(self, **kwargs):
+ def deactivate(self, **kwargs):
pass
- def deinit(self, **kwargs):
- pass
-
|
Clean the base device class.
|
## Code Before:
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
## Instruction:
Clean the base device class.
## Code After:
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
|
- from moa.threading import CallbackQueue
from moa.base import MoaBase
- from kivy.properties import BooleanProperty
- from kivy.clock import Clock
- try:
- from Queue import Queue
- except ImportError:
- from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
- __events__ = ('on_restart', )
-
- active = BooleanProperty(False)
-
- _kivy_eventloop_queue = None
-
- def __init__(self, allow_async=True, **kwargs):
- super(Device, self).__init__(**kwargs)
-
- if allow_async:
- trigger = Clock.create_trigger(self._do_queue)
- self._kivy_eventloop_queue = CallbackQueue(trigger)
-
- def __del__(self):
- self.deinit()
-
- def _do_queue(self, *largs, **kwargs):
- while 1:
- try:
- key, val = self._kivy_eventloop_queue.get()
- except Queue.Empty:
- return
- if key == 'set':
- setattr(*val)
- elif key == 'call':
- f, l, kw = val
- f(*l, **kw)
-
- def init(self, **kwargs):
? ^^
+ def activate(self, **kwargs):
? +++ ^^ +
pass
- def restart(self, **kwargs):
? ^^^ -
+ def recover(self, **kwargs):
? ^^^^
pass
- def on_restart(self, **kwargs):
? ^^^^ ^ -
+ def deactivate(self, **kwargs):
? ^ ^^ ++ +
pass
-
- def deinit(self, **kwargs):
- pass
|
64671712fb465a9e940484a5f2f4b8d673aaee75
|
words.py
|
words.py
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
Enforce lowercase on word selection
|
Enforce lowercase on word selection
|
Python
|
mit
|
andrewyang96/HangmanGame
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
+ word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
Enforce lowercase on word selection
|
## Code Before:
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
## Instruction:
Enforce lowercase on word selection
## Code After:
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
+ word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
8d7657ed52a40070136bbbe3da7069dcbe3fc1c3
|
altair/vegalite/v2/examples/stem_and_leaf.py
|
altair/vegalite/v2/examples/stem_and_leaf.py
|
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating Random Data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting Steam and Leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
# Grouping Leafs for each Stem
grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
for key, group in original_data.groupby('stem'):
grouped_data = grouped_data.append({'stem':key,
'leaf': ''.join(group['leaf'].sort_values())},
ignore_index=True)
# Plotting Stems and Leafs
chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
text = 'leaf'
).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
|
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
original_data.sort_values(by=['stem', 'leaf'], inplace=True)
# Determining position
position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
position = np.hstack([position, [*group.reset_index().index.values]])
original_data['position'] = position + 1
# Creating stem and leaf plot
chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
text = 'leaf:N'
).configure_axis(labelFontSize=20).configure_text(fontSize=20)
|
Modify example to calculate leaf position
|
Modify example to calculate leaf position
|
Python
|
bsd-3-clause
|
altair-viz/altair,ellisonbg/altair,jakevdp/altair
|
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
- # Generating Random Data
+ # Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
- # Splitting Steam and Leaf
+ # Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
- # Grouping Leafs for each Stem
- grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
+ original_data.sort_values(by=['stem', 'leaf'], inplace=True)
+
+ # Determining position
+ position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
- grouped_data = grouped_data.append({'stem':key,
- 'leaf': ''.join(group['leaf'].sort_values())},
- ignore_index=True)
+ position = np.hstack([position, [*group.reset_index().index.values]])
+
+ original_data['position'] = position + 1
- # Plotting Stems and Leafs
+ # Creating stem and leaf plot
- chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
+ chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
- y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
+ y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
+ x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
- text = 'leaf'
+ text = 'leaf:N'
- ).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
+ ).configure_axis(labelFontSize=20).configure_text(fontSize=20)
+
|
Modify example to calculate leaf position
|
## Code Before:
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating Random Data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting Steam and Leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
# Grouping Leafs for each Stem
grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
for key, group in original_data.groupby('stem'):
grouped_data = grouped_data.append({'stem':key,
'leaf': ''.join(group['leaf'].sort_values())},
ignore_index=True)
# Plotting Stems and Leafs
chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
text = 'leaf'
).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
## Instruction:
Modify example to calculate leaf position
## Code After:
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
# Generating random data
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
# Splitting steam and leaf
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
original_data.sort_values(by=['stem', 'leaf'], inplace=True)
# Determining position
position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
position = np.hstack([position, [*group.reset_index().index.values]])
original_data['position'] = position + 1
# Creating stem and leaf plot
chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
text = 'leaf:N'
).configure_axis(labelFontSize=20).configure_text(fontSize=20)
|
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(42)
- # Generating Random Data
? ^ ^
+ # Generating random data
? ^ ^
original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)})
- # Splitting Steam and Leaf
? ^ ^
+ # Splitting steam and leaf
? ^ ^
original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1])
original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1])
- # Grouping Leafs for each Stem
- grouped_data = pd.DataFrame(columns=['stem', 'leaf'])
+ original_data.sort_values(by=['stem', 'leaf'], inplace=True)
+
+ # Determining position
+ position = np.array([], dtype=np.int64)
for key, group in original_data.groupby('stem'):
- grouped_data = grouped_data.append({'stem':key,
- 'leaf': ''.join(group['leaf'].sort_values())},
- ignore_index=True)
+ position = np.hstack([position, [*group.reset_index().index.values]])
+
+ original_data['position'] = position + 1
- # Plotting Stems and Leafs
+ # Creating stem and leaf plot
- chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode(
? ^^^^^^ ^^
+ chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode(
? +++ ^^^^ + ^
- y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)),
+ y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)),
? ++
+ x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)),
- text = 'leaf'
+ text = 'leaf:N'
? ++
- ).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
? ----------------------
+ ).configure_axis(labelFontSize=20).configure_text(fontSize=20)
|
2979986e68d2b8c2b3fb4090e258a941d6a56d9e
|
tests/test_website_flow.py
|
tests/test_website_flow.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
def test_404_comes_out_404(harness):
harness.fs.project.mk(('404.html.spt', 'Eep!'))
assert harness.client.GET(raise_immediately=False).code == 404
|
Add failing test for exception handling regression
|
Add failing test for exception handling regression
Code coming out of custom error message needs to be the code expected.
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
+
+ def test_404_comes_out_404(harness):
+ harness.fs.project.mk(('404.html.spt', 'Eep!'))
+ assert harness.client.GET(raise_immediately=False).code == 404
+
|
Add failing test for exception handling regression
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
## Instruction:
Add failing test for exception handling regression
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
def test_404_comes_out_404(harness):
harness.fs.project.mk(('404.html.spt', 'Eep!'))
assert harness.client.GET(raise_immediately=False).code == 404
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def test_website_can_respond(harness):
harness.fs.www.mk(('index.html.spt', 'Greetings, program!'))
assert harness.client.GET().body == 'Greetings, program!'
+
+
+ def test_404_comes_out_404(harness):
+ harness.fs.project.mk(('404.html.spt', 'Eep!'))
+ assert harness.client.GET(raise_immediately=False).code == 404
|
3c3e9b5f584c23c9359ca9dce71b89635fffd043
|
LiSE/LiSE/tests/test_load.py
|
LiSE/LiSE/tests/test_load.py
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng, kobold_pos=(9, 9))
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
eng.character['physical'].thing['kobold']['location'] = (3, 3)
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
!= eng._things_cache.keyframe['physical']['trunk'][1][tick1]
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
Make test_multi_keyframe demonstrate what it's supposed to
|
Make test_multi_keyframe demonstrate what it's supposed to
I was testing a cache that wasn't behaving correctly for
unrelated reasons.
|
Python
|
agpl-3.0
|
LogicalDash/LiSE,LogicalDash/LiSE
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
- inittest(eng, kobold_pos=(9, 9))
+ inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
- eng.character['physical'].thing['kobold']['location'] = (3, 3)
+ del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
- assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
+ assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
- != eng._things_cache.keyframe['physical']['trunk'][1][tick1]
+ != eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
Make test_multi_keyframe demonstrate what it's supposed to
|
## Code Before:
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng, kobold_pos=(9, 9))
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
eng.character['physical'].thing['kobold']['location'] = (3, 3)
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
!= eng._things_cache.keyframe['physical']['trunk'][1][tick1]
## Instruction:
Make test_multi_keyframe demonstrate what it's supposed to
## Code After:
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
- inittest(eng, kobold_pos=(9, 9))
+ inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
- eng.character['physical'].thing['kobold']['location'] = (3, 3)
+ del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
- assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
? --- ^
+ assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
? ^^^ ++
- != eng._things_cache.keyframe['physical']['trunk'][1][tick1]
? --- ^
+ != eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
? ^^^ ++
|
0adadcb3f04e2ecb98b5ca5de1afba2ba7208d23
|
spacy/tests/parser/test_beam_parse.py
|
spacy/tests/parser/test_beam_parse.py
|
import spacy
import pytest
@pytest.mark.models
def test_beam_parse():
nlp = spacy.load('en_core_web_sm')
doc = nlp(u'Australia is a country', disable=['ner'])
ents = nlp.entity(doc, beam_width=2)
print(ents)
|
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
|
Fix beam parse model test
|
Fix beam parse model test
|
Python
|
mit
|
aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy
|
- import spacy
+ from __future__ import unicode_literals
+
import pytest
+
- @pytest.mark.models
+ @pytest.mark.models('en')
- def test_beam_parse():
+ def test_beam_parse(EN):
- nlp = spacy.load('en_core_web_sm')
- doc = nlp(u'Australia is a country', disable=['ner'])
+ doc = EN(u'Australia is a country', disable=['ner'])
- ents = nlp.entity(doc, beam_width=2)
+ ents = EN.entity(doc, beam_width=2)
print(ents)
-
|
Fix beam parse model test
|
## Code Before:
import spacy
import pytest
@pytest.mark.models
def test_beam_parse():
nlp = spacy.load('en_core_web_sm')
doc = nlp(u'Australia is a country', disable=['ner'])
ents = nlp.entity(doc, beam_width=2)
print(ents)
## Instruction:
Fix beam parse model test
## Code After:
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
|
- import spacy
+ from __future__ import unicode_literals
+
import pytest
+
- @pytest.mark.models
+ @pytest.mark.models('en')
? ++++++
- def test_beam_parse():
+ def test_beam_parse(EN):
? ++
- nlp = spacy.load('en_core_web_sm')
- doc = nlp(u'Australia is a country', disable=['ner'])
? ^^^
+ doc = EN(u'Australia is a country', disable=['ner'])
? ^^
- ents = nlp.entity(doc, beam_width=2)
? ^^^
+ ents = EN.entity(doc, beam_width=2)
? ^^
print(ents)
-
|
0f54780e142cb6bd15df2ed702bd4fa4b2d3fe79
|
keys.py
|
keys.py
|
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
Use spaces instead of tabs
|
Use spaces instead of tabs
|
Python
|
mit
|
bman4789/weatherBot,bman4789/weatherBot,BrianMitchL/weatherBot
|
keys = dict(
- consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
- consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
- access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
- access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
Use spaces instead of tabs
|
## Code Before:
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
## Instruction:
Use spaces instead of tabs
## Code After:
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
keys = dict(
- consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^
+ consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^^^^
- consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^
+ consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^^^^
- access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^
+ access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^^^^
- access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^
+ access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
? ^^^^
)
|
92c01be43b80247ce2233851dd74b041bb9d44b0
|
csunplugged/resources/views/BarcodeChecksumPosterResourceGenerator.py
|
csunplugged/resources/views/BarcodeChecksumPosterResourceGenerator.py
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
Modify Barcode Checksum Poster resource to dynamically overlay text
|
Modify Barcode Checksum Poster resource to dynamically overlay text
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Class for Barcode Checksum Poster resource generator."""
- from PIL import Image
+ from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
+ from utils.TextBoxDrawer import TextBoxDrawer
+ from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
- image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
+ path = "static/img/resources/barcode-checksum-poster/{}-digits"
- image_path = image_path.format(self.requested_options["barcode_length"])
+ path = path.format(self.requested_options["barcode_length"])
+ image_path = "{}.png".format(path)
+ svg_path = "{}.svg".format(path)
image = Image.open(image_path)
+
+ draw = ImageDraw.Draw(image)
+ textbox_drawer = TextBoxDrawer(image, draw, svg_path)
+
+ textbox_drawer.write_text_box(
+ "title",
+ _("13 Digit Barcode"),
+ horiz_just="center",
+ vert_just="center",
+ )
+
+ headings = {
+ "heading1": _("Separate!"),
+ "heading2": _("Operate!"),
+ "heading3": _("Calculate!")
+ }
+
+ for heading_id, heading in headings.items():
+ textbox_drawer.write_text_box(
+ heading_id,
+ heading,
+ )
+
+ textbox_drawer.write_text_box(
+ "paragraph",
+ _("Remember that this algorithm uses modulo 10, so we are only "
+ "interested in the number in the one's column."),
+ )
+
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
Modify Barcode Checksum Poster resource to dynamically overlay text
|
## Code Before:
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image
from utils.BaseResourceGenerator import BaseResourceGenerator
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
image_path = image_path.format(self.requested_options["barcode_length"])
image = Image.open(image_path)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
## Instruction:
Modify Barcode Checksum Poster resource to dynamically overlay text
## Code After:
"""Class for Barcode Checksum Poster resource generator."""
from PIL import Image, ImageDraw
from utils.BaseResourceGenerator import BaseResourceGenerator
from utils.TextBoxDrawer import TextBoxDrawer
from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
path = "static/img/resources/barcode-checksum-poster/{}-digits"
path = path.format(self.requested_options["barcode_length"])
image_path = "{}.png".format(path)
svg_path = "{}.svg".format(path)
image = Image.open(image_path)
draw = ImageDraw.Draw(image)
textbox_drawer = TextBoxDrawer(image, draw, svg_path)
textbox_drawer.write_text_box(
"title",
_("13 Digit Barcode"),
horiz_just="center",
vert_just="center",
)
headings = {
"heading1": _("Separate!"),
"heading2": _("Operate!"),
"heading3": _("Calculate!")
}
for heading_id, heading in headings.items():
textbox_drawer.write_text_box(
heading_id,
heading,
)
textbox_drawer.write_text_box(
"paragraph",
_("Remember that this algorithm uses modulo 10, so we are only "
"interested in the number in the one's column."),
)
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
"""Class for Barcode Checksum Poster resource generator."""
- from PIL import Image
+ from PIL import Image, ImageDraw
? +++++++++++
from utils.BaseResourceGenerator import BaseResourceGenerator
+ from utils.TextBoxDrawer import TextBoxDrawer
+ from django.utils.translation import ugettext as _
class BarcodeChecksumPosterResourceGenerator(BaseResourceGenerator):
"""Class for Grid resource generator."""
additional_valid_options = {
"barcode_length": ["12", "13"]
}
def data(self):
"""Create data for a copy of the Grid resource.
Returns:
A dictionary of the one page for the resource.
"""
- image_path = "static/img/resources/barcode-checksum-poster/{}-digits.png"
? ------ ----
+ path = "static/img/resources/barcode-checksum-poster/{}-digits"
- image_path = image_path.format(self.requested_options["barcode_length"])
? ------ ------
+ path = path.format(self.requested_options["barcode_length"])
+ image_path = "{}.png".format(path)
+ svg_path = "{}.svg".format(path)
image = Image.open(image_path)
+
+ draw = ImageDraw.Draw(image)
+ textbox_drawer = TextBoxDrawer(image, draw, svg_path)
+
+ textbox_drawer.write_text_box(
+ "title",
+ _("13 Digit Barcode"),
+ horiz_just="center",
+ vert_just="center",
+ )
+
+ headings = {
+ "heading1": _("Separate!"),
+ "heading2": _("Operate!"),
+ "heading3": _("Calculate!")
+ }
+
+ for heading_id, heading in headings.items():
+ textbox_drawer.write_text_box(
+ heading_id,
+ heading,
+ )
+
+ textbox_drawer.write_text_box(
+ "paragraph",
+ _("Remember that this algorithm uses modulo 10, so we are only "
+ "interested in the number in the one's column."),
+ )
+
return {"type": "image", "data": image}
@property
def subtitle(self):
"""Return the subtitle string of the resource.
Used after the resource name in the filename, and
also on the resource image.
Returns:
text for subtitle (str).
"""
barcode_length = self.requested_options["barcode_length"]
return "{} digits - {}".format(barcode_length, super().subtitle)
|
da67ce3f25a708b99cb81f17703e74965dbea960
|
rtrss/filestorage/httputil.py
|
rtrss/filestorage/httputil.py
|
import logging
import time
import requests
from googleapiclient.errors import HttpError
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
if issubclass(exc, requests.exceptions.RequestException):
code = exc.response.status_code
elif issubclass(exc, HttpError):
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
exceptions=(HttpError, requests.exceptions.RequestException),
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
except exceptions as err:
# Reraise if non-retryable error
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
|
import logging
import time
import requests
from googleapiclient.errors import HttpError
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
if isinstance(exc, requests.exceptions.RequestException):
code = exc.response.status_code
elif isinstance(exc, HttpError):
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
except Exception as err:
# Re-raise if non-retryable error
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
|
Remove unnecessary parameter, fix type detection bug
|
Remove unnecessary parameter, fix type detection bug
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
import logging
import time
import requests
from googleapiclient.errors import HttpError
+
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
- if issubclass(exc, requests.exceptions.RequestException):
+ if isinstance(exc, requests.exceptions.RequestException):
code = exc.response.status_code
- elif issubclass(exc, HttpError):
+ elif isinstance(exc, HttpError):
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
- exceptions=(HttpError, requests.exceptions.RequestException),
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
- except exceptions as err:
+ except Exception as err:
- # Reraise if non-retryable error
+ # Re-raise if non-retryable error
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
|
Remove unnecessary parameter, fix type detection bug
|
## Code Before:
import logging
import time
import requests
from googleapiclient.errors import HttpError
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
if issubclass(exc, requests.exceptions.RequestException):
code = exc.response.status_code
elif issubclass(exc, HttpError):
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
exceptions=(HttpError, requests.exceptions.RequestException),
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
except exceptions as err:
# Reraise if non-retryable error
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
## Instruction:
Remove unnecessary parameter, fix type detection bug
## Code After:
import logging
import time
import requests
from googleapiclient.errors import HttpError
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
if isinstance(exc, requests.exceptions.RequestException):
code = exc.response.status_code
elif isinstance(exc, HttpError):
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
except Exception as err:
# Re-raise if non-retryable error
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
|
import logging
import time
import requests
from googleapiclient.errors import HttpError
+
# Number of retries in case of API errors
NUM_RETRIES = 3
# Delay between retry attempts, seconds
RETRY_DELAY = 1
_logger = logging.getLogger(__name__)
def is_retryable(exc):
retryable_codes = [500, 502, 503, 504]
"""Returns True if exception is "retryable", eg. HTTP 503"""
- if issubclass(exc, requests.exceptions.RequestException):
? ^^ ^^^^
+ if isinstance(exc, requests.exceptions.RequestException):
? ++ ^^^ ^
code = exc.response.status_code
- elif issubclass(exc, HttpError):
? ^^ ^^^^
+ elif isinstance(exc, HttpError):
? ++ ^^^ ^
code = exc.resp.status
else:
return False
return code in retryable_codes
def retry_on_exception(
- exceptions=(HttpError, requests.exceptions.RequestException),
retryable=is_retryable,
tries=NUM_RETRIES,
delay=RETRY_DELAY):
"""Retry call if function raises retryable exception"""
def wrap(f):
def wrapped_f(*args, **kwargs):
mtries = tries
while mtries > 1:
try:
return f(*args, **kwargs)
- except exceptions as err:
? ^ -
+ except Exception as err:
? ^
- # Reraise if non-retryable error
+ # Re-raise if non-retryable error
? +
if not retryable(err):
raise
_logger.warn("Retrying in %.2f seconds ...", delay)
time.sleep(delay)
mtries -= 1
# Only one last try left
return f(*args, **kwargs)
return wrapped_f
return wrap
|
7ebc9a4511d52707ce88a1b8bc2d3fa638e1fb91
|
c2rst.py
|
c2rst.py
|
import sphinx.parsers
import docutils.parsers.rst as rst
class CStrip(sphinx.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
import docutils.parsers
import docutils.parsers.rst as rst
class CStrip(docutils.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
|
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
|
Python
|
mit
|
adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython
|
- import sphinx.parsers
+ import docutils.parsers
import docutils.parsers.rst as rst
- class CStrip(sphinx.parsers.Parser):
+ class CStrip(docutils.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
|
## Code Before:
import sphinx.parsers
import docutils.parsers.rst as rst
class CStrip(sphinx.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
## Instruction:
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
## Code After:
import docutils.parsers
import docutils.parsers.rst as rst
class CStrip(docutils.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
- import sphinx.parsers
+ import docutils.parsers
import docutils.parsers.rst as rst
- class CStrip(sphinx.parsers.Parser):
? -----
+ class CStrip(docutils.parsers.Parser):
? +++++++
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
1ac4e00f3d06955da90bddf03a6e478ddeb4d220
|
core/modules/html_has_same_domain.py
|
core/modules/html_has_same_domain.py
|
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if ("naver" in tag.text.lower()):
return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
|
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if cnt >= 1:
return "S", mod
return "U", mod
|
Undo underperformaing change to code
|
Undo underperformaing change to code
|
Python
|
bsd-2-clause
|
mjkim610/phishing-detection,jaeyung1001/phishing_site_detection
|
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
- if ("naver" in tag.text.lower()):
- return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
|
Undo underperformaing change to code
|
## Code Before:
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if ("naver" in tag.text.lower()):
return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
## Instruction:
Undo underperformaing change to code
## Code After:
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if cnt >= 1:
return "S", mod
return "U", mod
|
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
- if ("naver" in tag.text.lower()):
- return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
|
7d1463fc732cdc6aef3299c6d2bbe916418e6d6e
|
hkisaml/api.py
|
hkisaml/api.py
|
from django.contrib.auth.models import User
from rest_framework import permissions, routers, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
from django.contrib.auth.models import User
from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
if obj.first_name and obj.last_name:
ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
Add full_name field to API
|
Add full_name field to API
|
Python
|
mit
|
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
|
from django.contrib.auth.models import User
- from rest_framework import permissions, routers, serializers, generics, mixins
+ from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
+ if obj.first_name and obj.last_name:
+ ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
Add full_name field to API
|
## Code Before:
from django.contrib.auth.models import User
from rest_framework import permissions, routers, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
## Instruction:
Add full_name field to API
## Code After:
from django.contrib.auth.models import User
from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
if obj.first_name and obj.last_name:
ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
from django.contrib.auth.models import User
- from rest_framework import permissions, routers, serializers, generics, mixins
? ---------
+ from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
+ if obj.first_name and obj.last_name:
+ ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
61fe55efba2c491da6a93421fa702f123615bc32
|
spacy/lang/en/__init__.py
|
spacy/lang/en/__init__.py
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
lex_attr_getters.update(LEX_ATTRS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
morph_rules = dict(MORPH_RULES)
lemma_rules = dict(LEMMA_RULES)
lemma_index = dict(LEMMA_INDEX)
lemma_exc = dict(LEMMA_EXC)
sytax_iterators = dict(SYNTAX_ITERATORS)
__all__ = ['English']
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class EnglishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
lex_attr_getters.update(LEX_ATTRS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
morph_rules = dict(MORPH_RULES)
lemma_rules = dict(LEMMA_RULES)
lemma_index = dict(LEMMA_INDEX)
lemma_exc = dict(LEMMA_EXC)
sytax_iterators = dict(SYNTAX_ITERATORS)
class English(Language):
lang = 'en'
Defaults = EnglishDefaults
__all__ = ['English', 'EnglishDefaults']
|
Move EnglishDefaults class out of English
|
Move EnglishDefaults class out of English
|
Python
|
mit
|
honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
+ class EnglishDefaults(Language.Defaults):
+ lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+ lex_attr_getters[LANG] = lambda text: 'en'
+ lex_attr_getters.update(LEX_ATTRS)
+
+ tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
+ tag_map = dict(TAG_MAP)
+ stop_words = set(STOP_WORDS)
+ morph_rules = dict(MORPH_RULES)
+ lemma_rules = dict(LEMMA_RULES)
+ lemma_index = dict(LEMMA_INDEX)
+ lemma_exc = dict(LEMMA_EXC)
+ sytax_iterators = dict(SYNTAX_ITERATORS)
+
+
class English(Language):
lang = 'en'
+ Defaults = EnglishDefaults
- class Defaults(Language.Defaults):
- lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
- lex_attr_getters[LANG] = lambda text: 'en'
- lex_attr_getters.update(LEX_ATTRS)
-
- tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
- tag_map = dict(TAG_MAP)
- stop_words = set(STOP_WORDS)
- morph_rules = dict(MORPH_RULES)
- lemma_rules = dict(LEMMA_RULES)
- lemma_index = dict(LEMMA_INDEX)
- lemma_exc = dict(LEMMA_EXC)
- sytax_iterators = dict(SYNTAX_ITERATORS)
- __all__ = ['English']
+ __all__ = ['English', 'EnglishDefaults']
|
Move EnglishDefaults class out of English
|
## Code Before:
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
lex_attr_getters.update(LEX_ATTRS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
morph_rules = dict(MORPH_RULES)
lemma_rules = dict(LEMMA_RULES)
lemma_index = dict(LEMMA_INDEX)
lemma_exc = dict(LEMMA_EXC)
sytax_iterators = dict(SYNTAX_ITERATORS)
__all__ = ['English']
## Instruction:
Move EnglishDefaults class out of English
## Code After:
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class EnglishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
lex_attr_getters.update(LEX_ATTRS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
morph_rules = dict(MORPH_RULES)
lemma_rules = dict(LEMMA_RULES)
lemma_index = dict(LEMMA_INDEX)
lemma_exc = dict(LEMMA_EXC)
sytax_iterators = dict(SYNTAX_ITERATORS)
class English(Language):
lang = 'en'
Defaults = EnglishDefaults
__all__ = ['English', 'EnglishDefaults']
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from .lemmatizer import LEMMA_RULES, LEMMA_INDEX, LEMMA_EXC
from .syntax_iterators import SYNTAX_ITERATORS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
+ class EnglishDefaults(Language.Defaults):
+ lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+ lex_attr_getters[LANG] = lambda text: 'en'
+ lex_attr_getters.update(LEX_ATTRS)
+
+ tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
+ tag_map = dict(TAG_MAP)
+ stop_words = set(STOP_WORDS)
+ morph_rules = dict(MORPH_RULES)
+ lemma_rules = dict(LEMMA_RULES)
+ lemma_index = dict(LEMMA_INDEX)
+ lemma_exc = dict(LEMMA_EXC)
+ sytax_iterators = dict(SYNTAX_ITERATORS)
+
+
class English(Language):
lang = 'en'
+ Defaults = EnglishDefaults
- class Defaults(Language.Defaults):
- lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
- lex_attr_getters[LANG] = lambda text: 'en'
- lex_attr_getters.update(LEX_ATTRS)
-
- tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
- tag_map = dict(TAG_MAP)
- stop_words = set(STOP_WORDS)
- morph_rules = dict(MORPH_RULES)
- lemma_rules = dict(LEMMA_RULES)
- lemma_index = dict(LEMMA_INDEX)
- lemma_exc = dict(LEMMA_EXC)
- sytax_iterators = dict(SYNTAX_ITERATORS)
- __all__ = ['English']
+ __all__ = ['English', 'EnglishDefaults']
|
fccc7b59e742bc887580c91c2c2dbeae2c85caee
|
wagtailannotatedimage/views.py
|
wagtailannotatedimage/views.py
|
from django.http import HttpResponse
from wagtail.wagtailimages.models import Filter, Image
def get_full_image_url(request, image_id):
image = Image.objects.get(id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
|
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from wagtail.wagtailimages.models import Filter, get_iamge_model
Image = get_iamge_model()
def get_full_image_url(request, image_id):
image = get_object_or_404(Image, id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
|
Allow for custom image models, 404 on image not found intead of error
|
Allow for custom image models, 404 on image not found intead of error
|
Python
|
bsd-3-clause
|
takeflight/wagtailannotatedimage,takeflight/wagtailannotatedimage,takeflight/wagtailannotatedimage
|
from django.http import HttpResponse
+ from django.shortcuts import get_object_or_404
- from wagtail.wagtailimages.models import Filter, Image
+ from wagtail.wagtailimages.models import Filter, get_iamge_model
+
+ Image = get_iamge_model()
def get_full_image_url(request, image_id):
- image = Image.objects.get(id=image_id)
+ image = get_object_or_404(Image, id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
|
Allow for custom image models, 404 on image not found intead of error
|
## Code Before:
from django.http import HttpResponse
from wagtail.wagtailimages.models import Filter, Image
def get_full_image_url(request, image_id):
image = Image.objects.get(id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
## Instruction:
Allow for custom image models, 404 on image not found intead of error
## Code After:
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from wagtail.wagtailimages.models import Filter, get_iamge_model
Image = get_iamge_model()
def get_full_image_url(request, image_id):
image = get_object_or_404(Image, id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
|
from django.http import HttpResponse
+ from django.shortcuts import get_object_or_404
- from wagtail.wagtailimages.models import Filter, Image
? ---
+ from wagtail.wagtailimages.models import Filter, get_iamge_model
? +++++++++++++
+
+ Image = get_iamge_model()
def get_full_image_url(request, image_id):
- image = Image.objects.get(id=image_id)
+ image = get_object_or_404(Image, id=image_id)
if image:
filter, _ = Filter.objects.get_or_create(spec='original')
orig_rendition = image.get_rendition(filter)
return HttpResponse(orig_rendition.img_tag())
else:
return HttpResponse('')
|
8dccce77f6c08a7c20f38b9f1bacc27b71ab56a1
|
examples/web/wiki/macros/utils.py
|
examples/web/wiki/macros/utils.py
|
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
s = "\n".join(["* %s" % k for k in environ["macros"].keys()])
return environ["parser"].generate(s, environ=environ)
|
from inspect import getdoc
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
macros = environ["macros"].items()
s = "\n".join(["== %s ==\n%s\n" % (k, getdoc(v)) for k, v in macros])
return environ["parser"].generate(s, environ=environ)
|
Change the output of <<macros>>
|
examples/web/wiki: Change the output of <<macros>>
|
Python
|
mit
|
treemo/circuits,eriol/circuits,treemo/circuits,eriol/circuits,nizox/circuits,treemo/circuits,eriol/circuits
|
+
+ from inspect import getdoc
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
- s = "\n".join(["* %s" % k for k in environ["macros"].keys()])
+ macros = environ["macros"].items()
+ s = "\n".join(["== %s ==\n%s\n" % (k, getdoc(v)) for k, v in macros])
return environ["parser"].generate(s, environ=environ)
|
Change the output of <<macros>>
|
## Code Before:
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
s = "\n".join(["* %s" % k for k in environ["macros"].keys()])
return environ["parser"].generate(s, environ=environ)
## Instruction:
Change the output of <<macros>>
## Code After:
from inspect import getdoc
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
macros = environ["macros"].items()
s = "\n".join(["== %s ==\n%s\n" % (k, getdoc(v)) for k, v in macros])
return environ["parser"].generate(s, environ=environ)
|
+
+ from inspect import getdoc
def macros(macro, environ, *args, **kwargs):
"""Return a list of available macros"""
- s = "\n".join(["* %s" % k for k in environ["macros"].keys()])
+ macros = environ["macros"].items()
+ s = "\n".join(["== %s ==\n%s\n" % (k, getdoc(v)) for k, v in macros])
return environ["parser"].generate(s, environ=environ)
|
301f23067dde512f56ba5bf2201b666d125ffc96
|
setup.py
|
setup.py
|
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
paths.append('whacked4')
build_exe_options = {
'packages': [],
'path': paths,
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
build_exe_options['path'].append('src')
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
|
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
paths.append('src')
build_exe_options = {
'path': paths,
'packages': ['whacked4'],
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
|
Update distutils script. Release builds still twice the size though...
|
Update distutils script. Release builds still twice the size though...
|
Python
|
bsd-2-clause
|
GitExl/WhackEd4,GitExl/WhackEd4
|
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
- paths.append('whacked4')
+ paths.append('src')
build_exe_options = {
- 'packages': [],
'path': paths,
+ 'packages': ['whacked4'],
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
- build_exe_options['path'].append('src')
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
|
Update distutils script. Release builds still twice the size though...
|
## Code Before:
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
paths.append('whacked4')
build_exe_options = {
'packages': [],
'path': paths,
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
build_exe_options['path'].append('src')
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
## Instruction:
Update distutils script. Release builds still twice the size though...
## Code After:
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
paths.append('src')
build_exe_options = {
'path': paths,
'packages': ['whacked4'],
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
|
import sys
import os
from cx_Freeze import setup, Executable
paths = []
paths.extend(sys.path)
- paths.append('whacked4')
? ^^^ ----
+ paths.append('src')
? ^^
build_exe_options = {
- 'packages': [],
'path': paths,
+ 'packages': ['whacked4'],
'include_files': ['res', 'cfg', 'docs', 'LICENSE', 'README.md'],
'optimize': 2,
'include_msvcr': True
}
- build_exe_options['path'].append('src')
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
exe = Executable(
'src/main.py',
base=base,
targetName=os.environ['app_name_lower'] + '.exe',
icon='res/icon-hatchet.ico'
)
setup(
name = os.environ['app_title'],
version = os.environ['app_version_value'],
description = os.environ['app_description'],
options = {'build_exe': build_exe_options},
executables = [exe]
)
|
933e7b61f5d7c73924ea89a6ce17acf39e4f9c8d
|
packages/Python/lldbsuite/test/lang/c/unicode/TestUnicodeSymbols.py
|
packages/Python/lldbsuite/test/lang/c/unicode/TestUnicodeSymbols.py
|
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
@expectedFailureAll(compiler="clang", compiler_version=['<', '7.0'])
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
|
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(compiler="clang", compiler_version=['<', '7.0'])
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
|
Change xfail to skipIf. The exact condition is really difficult to get right and doesn't add much signal.
|
Change xfail to skipIf. The exact condition is really difficult to get
right and doesn't add much signal.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@340574 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb
|
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
- @expectedFailureAll(compiler="clang", compiler_version=['<', '7.0'])
+ @skipIf(compiler="clang", compiler_version=['<', '7.0'])
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
|
Change xfail to skipIf. The exact condition is really difficult to get right and doesn't add much signal.
|
## Code Before:
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
@expectedFailureAll(compiler="clang", compiler_version=['<', '7.0'])
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
## Instruction:
Change xfail to skipIf. The exact condition is really difficult to get right and doesn't add much signal.
## Code After:
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(compiler="clang", compiler_version=['<', '7.0'])
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
|
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class TestUnicodeSymbols(TestBase):
mydir = TestBase.compute_mydir(__file__)
- @expectedFailureAll(compiler="clang", compiler_version=['<', '7.0'])
? ^^ ^^^^^^^^^^^^^^^
+ @skipIf(compiler="clang", compiler_version=['<', '7.0'])
? ^^^ ^^
def test_union_members(self):
self.build()
spec = lldb.SBModuleSpec()
spec.SetFileSpec(lldb.SBFileSpec(self.getBuildArtifact("a.out")))
module = lldb.SBModule(spec)
self.assertTrue(module.IsValid())
mytype = module.FindFirstType("foobár")
self.assertTrue(mytype.IsValid())
self.assertTrue(mytype.IsPointerType())
|
9db490d5d175f108231cc87afd87a593359837e8
|
app/views.py
|
app/views.py
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
@app.route('/')
@app.route('/index')
def index():
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
Fix the disconnect after 8 hours bug.
|
Fix the disconnect after 8 hours bug.
|
Python
|
mit
|
jbwhit/hammer-pricer,jbwhit/hammer-pricer
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
- con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
-
@app.route('/')
@app.route('/index')
def index():
+ con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
Fix the disconnect after 8 hours bug.
|
## Code Before:
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
@app.route('/')
@app.route('/index')
def index():
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
## Instruction:
Fix the disconnect after 8 hours bug.
## Code After:
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
- con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
-
@app.route('/')
@app.route('/index')
def index():
+ con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
e76777897bed5b9396d126e384555ea230b35784
|
sass_processor/apps.py
|
sass_processor/apps.py
|
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
_static_dir = 'static'
_sass_exts = ('.scss', '.sass')
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
static_dir = os.path.join(app_config.path, self._static_dir)
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
print(APPS_INCLUDE_DIRS)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
|
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
from django.conf import settings
from django.core.files.storage import get_storage_class
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
_sass_exts = ('.scss', '.sass')
_storage = get_storage_class(import_path=settings.STATICFILES_STORAGE)()
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
static_dir = os.path.join(app_config.path, self._storage.base_url.strip(os.path.sep))
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
|
Use StaticFileStorage to determine source directories
|
Use StaticFileStorage to determine source directories
|
Python
|
mit
|
jrief/django-sass-processor,jrief/django-sass-processor
|
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
+ from django.conf import settings
+ from django.core.files.storage import get_storage_class
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
- _static_dir = 'static'
_sass_exts = ('.scss', '.sass')
+ _storage = get_storage_class(import_path=settings.STATICFILES_STORAGE)()
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
- static_dir = os.path.join(app_config.path, self._static_dir)
+ static_dir = os.path.join(app_config.path, self._storage.base_url.strip(os.path.sep))
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
-
- print(APPS_INCLUDE_DIRS)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
|
Use StaticFileStorage to determine source directories
|
## Code Before:
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
_static_dir = 'static'
_sass_exts = ('.scss', '.sass')
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
static_dir = os.path.join(app_config.path, self._static_dir)
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
print(APPS_INCLUDE_DIRS)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
## Instruction:
Use StaticFileStorage to determine source directories
## Code After:
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
from django.conf import settings
from django.core.files.storage import get_storage_class
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
_sass_exts = ('.scss', '.sass')
_storage = get_storage_class(import_path=settings.STATICFILES_STORAGE)()
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
static_dir = os.path.join(app_config.path, self._storage.base_url.strip(os.path.sep))
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
|
from __future__ import unicode_literals
import os
from django.apps import apps, AppConfig
+ from django.conf import settings
+ from django.core.files.storage import get_storage_class
APPS_INCLUDE_DIRS = []
class SassProcessorConfig(AppConfig):
name = 'sass_processor'
verbose_name = "Sass Processor"
- _static_dir = 'static'
_sass_exts = ('.scss', '.sass')
+ _storage = get_storage_class(import_path=settings.STATICFILES_STORAGE)()
def ready(self):
app_configs = apps.get_app_configs()
for app_config in app_configs:
- static_dir = os.path.join(app_config.path, self._static_dir)
? ^^^^^^
+ static_dir = os.path.join(app_config.path, self._storage.base_url.strip(os.path.sep))
? +++++++++++++++++++++++++ ^^^^^ +
if os.path.isdir(static_dir):
self.traverse_tree(static_dir)
-
- print(APPS_INCLUDE_DIRS)
@classmethod
def traverse_tree(cls, static_dir):
"""traverse the static folders an look for at least one file ending in .scss/.sass"""
for root, dirs, files in os.walk(static_dir):
for filename in files:
basename, ext = os.path.splitext(filename)
if basename.startswith('_') and ext in cls._sass_exts:
APPS_INCLUDE_DIRS.append(static_dir)
return
|
6cb215211bff754f531126ac44df03e761b3d7fc
|
pagerduty_events_api/tests/test_pagerduty_incident.py
|
pagerduty_events_api/tests/test_pagerduty_incident.py
|
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
|
from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
|
Use data provider in PD incident tests.
|
Use data provider in PD incident tests.
|
Python
|
mit
|
BlasiusVonSzerencsi/pagerduty-events-api
|
+ from ddt import ddt, data, unpack
+
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
+ @ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
+ @data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
- def test_acknowledge_should_make_pagerduty_api_call(self, post):
+ def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
- self.__subject.acknowledge()
+ getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
- 'event_type': 'acknowledge',
+ 'event_type': action,
'incident_key': 'my_incident_key'})
- @patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
- def test_resolve_should_make_pagerduty_api_call(self, post):
- post.return_value = {}
-
- self.__subject.resolve()
-
- post.assert_called_once_with({'service_key': 'my_service_key',
- 'event_type': 'resolve',
- 'incident_key': 'my_incident_key'})
-
|
Use data provider in PD incident tests.
|
## Code Before:
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
## Instruction:
Use data provider in PD incident tests.
## Code After:
from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
|
+ from ddt import ddt, data, unpack
+
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
+ @ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
+ @data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
- def test_acknowledge_should_make_pagerduty_api_call(self, post):
? ------------
+ def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
? ++++++++++++ + ++++++++
post.return_value = {}
- self.__subject.acknowledge()
+ getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
- 'event_type': 'acknowledge',
? - ^ --------
+ 'event_type': action,
? ^^^
'incident_key': 'my_incident_key'})
-
- @patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
- def test_resolve_should_make_pagerduty_api_call(self, post):
- post.return_value = {}
-
- self.__subject.resolve()
-
- post.assert_called_once_with({'service_key': 'my_service_key',
- 'event_type': 'resolve',
- 'incident_key': 'my_incident_key'})
|
d93af9d0dcf09cd49071fc7f46d40e8fda30f96e
|
python/setup_fsurfer_backend.py
|
python/setup_fsurfer_backend.py
|
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
|
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
'task_completed.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
|
Include new script in packaging
|
Include new script in packaging
|
Python
|
apache-2.0
|
OSGConnect/freesurfer_workflow,OSGConnect/freesurfer_workflow
|
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
+ 'task_completed.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
|
Include new script in packaging
|
## Code Before:
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
## Instruction:
Include new script in packaging
## Code After:
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
'task_completed.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
|
from distutils.core import setup
setup(name='fsurfer-backend',
version='PKG_VERSION',
description='Scripts to handle background freesurfer processing',
author='Suchandra Thapa',
author_email='[email protected]',
url='https://github.com/OSGConnect/freesurfer_workflow',
scripts=['process_mri.py',
'update_fsurf_job.py',
'purge_inputs.py',
'purge_results.py',
'warn_purge.py',
'delete_jobs.py',
+ 'task_completed.py',
'fsurf_user_admin.py'],
license='Apache 2.0')
|
eda5e7e2bb83f35e18cd0b5402636d4e930e02b9
|
mamba/cli.py
|
mamba/cli.py
|
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
Use a choices for specifiying type of reporter
|
Use a choices for specifiying type of reporter
|
Python
|
mit
|
dex4er/mamba,nestorsalceda/mamba,angelsanz/mamba,jaimegildesagredo/mamba,markng/mamba,eferro/mamba,alejandrodob/mamba
|
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
- parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
+ parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
Use a choices for specifiying type of reporter
|
## Code Before:
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
## Instruction:
Use a choices for specifiying type of reporter
## Code After:
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
- parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
+ parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
? +++++++++++++++++++++++++++++++++++++++
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
95d9bb3a9500d80b5064c5fb4d5bd7b30406d1ae
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
requires = "grpc/1.17.2@inexorgame/stable",
generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
build_requires = "PremakeGen/0.1@memsharded/testing"
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
    """Conan recipe for grpc_cb_core, a C++ gRPC core library with a
    callback interface."""

    name = "grpc_cb_core"
    version = "0.2"
    license = "Apache-2.0"
    url = "https://github.com/jinq0123/grpc_cb_core"
    description = "C++ gRPC core library with callback interface."
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    # gRPC reference without user/channel suffix, i.e. resolved from
    # ConanCenter rather than a user remote.
    requires = "grpc/1.44.0@",
    generators = "cmake", "premake"  # The builtin premake generator
    exports_sources = "src*", "include*", "CMakeLists.txt"

    def build(self):
        # Configure and build with CMake, forwarding the flags Conan
        # derives from the current settings/options.
        cmake = CMake(self)
        self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def package(self):
        # Copy public headers plus every built library artifact
        # (static/shared, per platform) into the package layout.
        self.copy("include/*")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)

    def package_info(self):
        # Consumers link against the single grpc_cb_core library.
        self.cpp_info.libs = ["grpc_cb_core"]
|
Fix update remote to ConanCenter and grpc to highest buildable/supported version
|
Fix update remote to ConanCenter and grpc to highest buildable/supported version
|
Python
|
apache-2.0
|
jinq0123/grpc_cb_core,jinq0123/grpc_cb_core,jinq0123/grpc_cb_core
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
- requires = "grpc/1.17.2@inexorgame/stable",
+ requires = "grpc/1.44.0@",
+ generators = "cmake", "premake" # The builtin premake generator
- generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
- build_requires = "PremakeGen/0.1@memsharded/testing"
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
Fix update remote to ConanCenter and grpc to highest buildable/supported version
|
## Code Before:
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
requires = "grpc/1.17.2@inexorgame/stable",
generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
build_requires = "PremakeGen/0.1@memsharded/testing"
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
## Instruction:
Fix update remote to ConanCenter and grpc to highest buildable/supported version
## Code After:
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
requires = "grpc/1.44.0@",
generators = "cmake", "premake" # The builtin premake generator
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
- requires = "grpc/1.17.2@inexorgame/stable",
+ requires = "grpc/1.44.0@",
+ generators = "cmake", "premake" # The builtin premake generator
- generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
- build_requires = "PremakeGen/0.1@memsharded/testing"
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
39ea336297b0479abb29a70f831b2a02a01fcc18
|
portas/portas/utils.py
|
portas/portas/utils.py
|
import contextlib
import functools
import logging
import sys
import webob.exc
LOG = logging.getLogger(__name__)
def http_success_code(code):
    """Build a decorator that tags a method with an HTTP response code.

    The decorated callable is returned unchanged apart from gaining a
    ``wsgi_code`` attribute; the function attributes are manipulated
    directly and the method is not wrapped.
    """
    def attach(func):
        setattr(func, 'wsgi_code', code)
        return func
    return attach
def verify_tenant(func):
    """Reject calls whose request context belongs to another tenant.

    When the request carries a ``context`` whose ``tenant_id`` differs
    from the ``tenant_id`` argument, log the refusal and raise
    ``webob.exc.HTTPUnauthorized``; otherwise delegate to ``func``.
    """
    @functools.wraps(func)
    def guarded(self, req, tenant_id, *args, **kwargs):
        mismatch = (hasattr(req, 'context')
                    and tenant_id != req.context.tenant_id)
        if mismatch:
            LOG.info('User is not authorized to access this tenant.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, tenant_id, *args, **kwargs)
    return guarded
def require_admin(func):
    """Allow the call only when the request context has admin rights.

    When the request carries a ``context`` that is not admin, log the
    refusal and raise ``webob.exc.HTTPUnauthorized``; otherwise
    delegate to ``func``.
    """
    @functools.wraps(func)
    def guarded(self, req, *args, **kwargs):
        if hasattr(req, 'context') and not req.context.is_admin:
            LOG.info('User has no admin priviledges.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, *args, **kwargs)
    return guarded
@contextlib.contextmanager
def save_and_reraise_exception():
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be reraised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is reraised.
    """
    # Capture the in-flight exception BEFORE yielding, so it survives any
    # context clearing that happens while the caller's body runs.
    type_, value, traceback = sys.exc_info()
    try:
        yield
    except Exception:
        # The handler body itself raised: log the original exception we
        # were holding, then let the new one propagate.
        LOG.error('Original exception being dropped',
                  exc_info=(type_, value, traceback))
        raise
    # Three-argument raise re-raises the saved exception with its original
    # traceback. NOTE(review): this is Python-2-only syntax; it is a
    # SyntaxError under Python 3.
    raise type_, value, traceback
|
import logging
LOG = logging.getLogger(__name__)
# def verify_tenant(func):
# @functools.wraps(func)
# def __inner(self, req, tenant_id, *args, **kwargs):
# if hasattr(req, 'context') and tenant_id != req.context.tenant:
# LOG.info('User is not authorized to access this tenant.')
# raise webob.exc.HTTPUnauthorized
# return func(self, req, tenant_id, *args, **kwargs)
# return __inner
#
#
# def require_admin(func):
# @functools.wraps(func)
# def __inner(self, req, *args, **kwargs):
# if hasattr(req, 'context') and not req.context.is_admin:
# LOG.info('User has no admin priviledges.')
# raise webob.exc.HTTPUnauthorized
# return func(self, req, *args, **kwargs)
# return __inner
|
Remove unnecessary blocks of code
|
Remove unnecessary blocks of code
|
Python
|
apache-2.0
|
Bloomie/murano-agent,openstack/python-muranoclient,ativelkov/murano-api,sajuptpm/murano,NeCTAR-RC/murano,satish-avninetworks/murano,satish-avninetworks/murano,openstack/murano,openstack/murano-agent,openstack/murano-agent,telefonicaid/murano,satish-avninetworks/murano,DavidPurcell/murano_temp,openstack/murano-agent,sergmelikyan/murano,chenyujie/hybrid-murano,olivierlemasle/murano,olivierlemasle/murano,ativelkov/murano-api,sajuptpm/murano,Bloomie/murano-agent,satish-avninetworks/murano,openstack/murano,telefonicaid/murano-agent,olivierlemasle/murano,chenyujie/hybrid-murano,telefonicaid/murano-agent,Bloomie/murano-agent,openstack/python-muranoclient,telefonicaid/murano-agent,DavidPurcell/murano_temp,openstack/murano-agent,DavidPurcell/murano_temp,NeCTAR-RC/murano,NeCTAR-RC/murano,telefonicaid/murano,olivierlemasle/murano,Bloomie/murano-agent,DavidPurcell/murano_temp,sergmelikyan/murano,NeCTAR-RC/murano
|
- import contextlib
- import functools
import logging
- import sys
-
- import webob.exc
LOG = logging.getLogger(__name__)
- def http_success_code(code):
- """Attaches response code to a method.
-
- This decorator associates a response code with a method. Note
- that the function attributes are directly manipulated; the method
- is not wrapped.
- """
-
- def decorator(func):
- func.wsgi_code = code
- return func
- return decorator
+ # def verify_tenant(func):
+ # @functools.wraps(func)
+ # def __inner(self, req, tenant_id, *args, **kwargs):
+ # if hasattr(req, 'context') and tenant_id != req.context.tenant:
+ # LOG.info('User is not authorized to access this tenant.')
+ # raise webob.exc.HTTPUnauthorized
+ # return func(self, req, tenant_id, *args, **kwargs)
+ # return __inner
+ #
+ #
+ # def require_admin(func):
+ # @functools.wraps(func)
+ # def __inner(self, req, *args, **kwargs):
+ # if hasattr(req, 'context') and not req.context.is_admin:
+ # LOG.info('User has no admin priviledges.')
+ # raise webob.exc.HTTPUnauthorized
+ # return func(self, req, *args, **kwargs)
+ # return __inner
- def verify_tenant(func):
- @functools.wraps(func)
- def __inner(self, req, tenant_id, *args, **kwargs):
- if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
- LOG.info('User is not authorized to access this tenant.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, tenant_id, *args, **kwargs)
- return __inner
-
-
- def require_admin(func):
- @functools.wraps(func)
- def __inner(self, req, *args, **kwargs):
- if hasattr(req, 'context') and not req.context.is_admin:
- LOG.info('User has no admin priviledges.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, *args, **kwargs)
- return __inner
-
-
- @contextlib.contextmanager
- def save_and_reraise_exception():
- """Save current exception, run some code and then re-raise.
-
- In some cases the exception context can be cleared, resulting in None
- being attempted to be reraised after an exception handler is run. This
- can happen when eventlet switches greenthreads or when running an
- exception handler, code raises and catches an exception. In both
- cases the exception context will be cleared.
-
- To work around this, we save the exception state, run handler code, and
- then re-raise the original exception. If another exception occurs, the
- saved exception is logged and the new exception is reraised.
- """
- type_, value, traceback = sys.exc_info()
- try:
- yield
- except Exception:
- LOG.error('Original exception being dropped',
- exc_info=(type_, value, traceback))
- raise
- raise type_, value, traceback
-
|
Remove unnecessary blocks of code
|
## Code Before:
import contextlib
import functools
import logging
import sys
import webob.exc
LOG = logging.getLogger(__name__)
def http_success_code(code):
"""Attaches response code to a method.
This decorator associates a response code with a method. Note
that the function attributes are directly manipulated; the method
is not wrapped.
"""
def decorator(func):
func.wsgi_code = code
return func
return decorator
def verify_tenant(func):
@functools.wraps(func)
def __inner(self, req, tenant_id, *args, **kwargs):
if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
LOG.info('User is not authorized to access this tenant.')
raise webob.exc.HTTPUnauthorized
return func(self, req, tenant_id, *args, **kwargs)
return __inner
def require_admin(func):
@functools.wraps(func)
def __inner(self, req, *args, **kwargs):
if hasattr(req, 'context') and not req.context.is_admin:
LOG.info('User has no admin priviledges.')
raise webob.exc.HTTPUnauthorized
return func(self, req, *args, **kwargs)
return __inner
@contextlib.contextmanager
def save_and_reraise_exception():
"""Save current exception, run some code and then re-raise.
In some cases the exception context can be cleared, resulting in None
being attempted to be reraised after an exception handler is run. This
can happen when eventlet switches greenthreads or when running an
exception handler, code raises and catches an exception. In both
cases the exception context will be cleared.
To work around this, we save the exception state, run handler code, and
then re-raise the original exception. If another exception occurs, the
saved exception is logged and the new exception is reraised.
"""
type_, value, traceback = sys.exc_info()
try:
yield
except Exception:
LOG.error('Original exception being dropped',
exc_info=(type_, value, traceback))
raise
raise type_, value, traceback
## Instruction:
Remove unnecessary blocks of code
## Code After:
import logging
LOG = logging.getLogger(__name__)
# def verify_tenant(func):
# @functools.wraps(func)
# def __inner(self, req, tenant_id, *args, **kwargs):
# if hasattr(req, 'context') and tenant_id != req.context.tenant:
# LOG.info('User is not authorized to access this tenant.')
# raise webob.exc.HTTPUnauthorized
# return func(self, req, tenant_id, *args, **kwargs)
# return __inner
#
#
# def require_admin(func):
# @functools.wraps(func)
# def __inner(self, req, *args, **kwargs):
# if hasattr(req, 'context') and not req.context.is_admin:
# LOG.info('User has no admin priviledges.')
# raise webob.exc.HTTPUnauthorized
# return func(self, req, *args, **kwargs)
# return __inner
|
- import contextlib
- import functools
import logging
- import sys
-
- import webob.exc
LOG = logging.getLogger(__name__)
- def http_success_code(code):
- """Attaches response code to a method.
+ # def verify_tenant(func):
+ # @functools.wraps(func)
+ # def __inner(self, req, tenant_id, *args, **kwargs):
+ # if hasattr(req, 'context') and tenant_id != req.context.tenant:
+ # LOG.info('User is not authorized to access this tenant.')
+ # raise webob.exc.HTTPUnauthorized
+ # return func(self, req, tenant_id, *args, **kwargs)
+ # return __inner
+ #
+ #
+ # def require_admin(func):
+ # @functools.wraps(func)
+ # def __inner(self, req, *args, **kwargs):
+ # if hasattr(req, 'context') and not req.context.is_admin:
+ # LOG.info('User has no admin priviledges.')
+ # raise webob.exc.HTTPUnauthorized
+ # return func(self, req, *args, **kwargs)
+ # return __inner
- This decorator associates a response code with a method. Note
- that the function attributes are directly manipulated; the method
- is not wrapped.
- """
-
- def decorator(func):
- func.wsgi_code = code
- return func
- return decorator
-
-
- def verify_tenant(func):
- @functools.wraps(func)
- def __inner(self, req, tenant_id, *args, **kwargs):
- if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
- LOG.info('User is not authorized to access this tenant.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, tenant_id, *args, **kwargs)
- return __inner
-
-
- def require_admin(func):
- @functools.wraps(func)
- def __inner(self, req, *args, **kwargs):
- if hasattr(req, 'context') and not req.context.is_admin:
- LOG.info('User has no admin priviledges.')
- raise webob.exc.HTTPUnauthorized
- return func(self, req, *args, **kwargs)
- return __inner
-
-
- @contextlib.contextmanager
- def save_and_reraise_exception():
- """Save current exception, run some code and then re-raise.
-
- In some cases the exception context can be cleared, resulting in None
- being attempted to be reraised after an exception handler is run. This
- can happen when eventlet switches greenthreads or when running an
- exception handler, code raises and catches an exception. In both
- cases the exception context will be cleared.
-
- To work around this, we save the exception state, run handler code, and
- then re-raise the original exception. If another exception occurs, the
- saved exception is logged and the new exception is reraised.
- """
- type_, value, traceback = sys.exc_info()
- try:
- yield
- except Exception:
- LOG.error('Original exception being dropped',
- exc_info=(type_, value, traceback))
- raise
- raise type_, value, traceback
|
12c57e52d3f107ce9723f33e7f35ef752bb8f3bc
|
axelrod/tests/unit/test_deterministic_cache.py
|
axelrod/tests/unit/test_deterministic_cache.py
|
import unittest
class TestDeterministicCache(unittest.TestCase):
def test_init(self):
pass
def test_setitem(self):
pass
def test_save(self):
pass
def test_load(self):
pass
|
import unittest
from axelrod import DeterministicCache, TitForTat, Defector
class TestDeterministicCache(unittest.TestCase):
    """Unit tests for axelrod's DeterministicCache."""

    @classmethod
    def setUpClass(cls):
        # Shared fixture: a (Player, Player) key and a play-history value,
        # reused across the tests below.
        cls.test_key1 = (TitForTat, Defector)
        cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]

    def test_basic_init(self):
        # A freshly constructed cache is mutable and has no turn count set.
        cache = DeterministicCache()
        self.assertTrue(cache.mutable)
        self.assertEqual(cache.turns, None)

    def test_init_from_file(self):
        # Placeholder: constructing a cache from a saved file.
        pass

    def test_setitem(self):
        # A value stored under a key can be read back unchanged.
        cache = DeterministicCache()
        cache[self.test_key1] = self.test_value1
        self.assertEqual(cache[self.test_key1], self.test_value1)

    def test_set_immutable_cache(self):
        # Writing to a cache marked immutable raises ValueError.
        cache = DeterministicCache()
        cache.mutable = False
        with self.assertRaises(ValueError):
            cache[self.test_key1] = self.test_value1

    def test_is_valid_key(self):
        cache = DeterministicCache()
        self.assertTrue(cache._is_valid_key(self.test_key1))
        # Should return false if key is not a tuple
        self.assertFalse(cache._is_valid_key('test'))
        # Should return false if tuple is not a pair
        self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
        # Should return false if contents of tuple are not axelrod Players
        self.assertFalse(cache._is_valid_key(('test', 'test')))

    def test_is_valid_value(self):
        # Placeholder: validation of stored values.
        pass

    def test_save(self):
        # Placeholder: persisting the cache to disk.
        pass

    def test_load(self):
        # Placeholder: loading a persisted cache from disk.
        pass
|
Add content for basic tests
|
Add content for basic tests
|
Python
|
mit
|
ranjinidas/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod
|
import unittest
+ from axelrod import DeterministicCache, TitForTat, Defector
class TestDeterministicCache(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.test_key1 = (TitForTat, Defector)
+ cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]
+
- def test_init(self):
+ def test_basic_init(self):
+ cache = DeterministicCache()
+ self.assertTrue(cache.mutable)
+ self.assertEqual(cache.turns, None)
+
+ def test_init_from_file(self):
pass
def test_setitem(self):
+ cache = DeterministicCache()
+ cache[self.test_key1] = self.test_value1
+ self.assertEqual(cache[self.test_key1], self.test_value1)
+
+ def test_set_immutable_cache(self):
+ cache = DeterministicCache()
+ cache.mutable = False
+ with self.assertRaises(ValueError):
+ cache[self.test_key1] = self.test_value1
+
+ def test_is_valid_key(self):
+ cache = DeterministicCache()
+ self.assertTrue(cache._is_valid_key(self.test_key1))
+ # Should return false if key is not a tuple
+ self.assertFalse(cache._is_valid_key('test'))
+ # Should return false if tuple is not a pair
+ self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
+ # Should return false if contents of tuple are not axelrod Players
+ self.assertFalse(cache._is_valid_key(('test', 'test')))
+
+ def test_is_valid_value(self):
pass
def test_save(self):
pass
def test_load(self):
pass
|
Add content for basic tests
|
## Code Before:
import unittest
class TestDeterministicCache(unittest.TestCase):
def test_init(self):
pass
def test_setitem(self):
pass
def test_save(self):
pass
def test_load(self):
pass
## Instruction:
Add content for basic tests
## Code After:
import unittest
from axelrod import DeterministicCache, TitForTat, Defector
class TestDeterministicCache(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_key1 = (TitForTat, Defector)
cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]
def test_basic_init(self):
cache = DeterministicCache()
self.assertTrue(cache.mutable)
self.assertEqual(cache.turns, None)
def test_init_from_file(self):
pass
def test_setitem(self):
cache = DeterministicCache()
cache[self.test_key1] = self.test_value1
self.assertEqual(cache[self.test_key1], self.test_value1)
def test_set_immutable_cache(self):
cache = DeterministicCache()
cache.mutable = False
with self.assertRaises(ValueError):
cache[self.test_key1] = self.test_value1
def test_is_valid_key(self):
cache = DeterministicCache()
self.assertTrue(cache._is_valid_key(self.test_key1))
# Should return false if key is not a tuple
self.assertFalse(cache._is_valid_key('test'))
# Should return false if tuple is not a pair
self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
# Should return false if contents of tuple are not axelrod Players
self.assertFalse(cache._is_valid_key(('test', 'test')))
def test_is_valid_value(self):
pass
def test_save(self):
pass
def test_load(self):
pass
|
import unittest
+ from axelrod import DeterministicCache, TitForTat, Defector
class TestDeterministicCache(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.test_key1 = (TitForTat, Defector)
+ cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]
+
- def test_init(self):
+ def test_basic_init(self):
? ++++++
+ cache = DeterministicCache()
+ self.assertTrue(cache.mutable)
+ self.assertEqual(cache.turns, None)
+
+ def test_init_from_file(self):
pass
def test_setitem(self):
+ cache = DeterministicCache()
+ cache[self.test_key1] = self.test_value1
+ self.assertEqual(cache[self.test_key1], self.test_value1)
+
+ def test_set_immutable_cache(self):
+ cache = DeterministicCache()
+ cache.mutable = False
+ with self.assertRaises(ValueError):
+ cache[self.test_key1] = self.test_value1
+
+ def test_is_valid_key(self):
+ cache = DeterministicCache()
+ self.assertTrue(cache._is_valid_key(self.test_key1))
+ # Should return false if key is not a tuple
+ self.assertFalse(cache._is_valid_key('test'))
+ # Should return false if tuple is not a pair
+ self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
+ # Should return false if contents of tuple are not axelrod Players
+ self.assertFalse(cache._is_valid_key(('test', 'test')))
+
+ def test_is_valid_value(self):
pass
def test_save(self):
pass
def test_load(self):
pass
|
64636185744a9a64b1b04fcd81ee32930bb145af
|
scores.py
|
scores.py
|
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
name = 'score_service'
player_service = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_service.get_players()
return sorted(players, key=lambda player: player.score, reverse=True)
|
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
    """Nameko service exposing score-related RPC endpoints.

    Player data is fetched over RPC from the ``players_service``.
    """

    name = 'score_service'

    player_rpc = RpcProxy('players_service')

    @rpc
    def leaderboard(self):
        """Return (name, score) pairs ordered by score, highest first.

        Bug fix: the sorted list was previously computed but the UNSORTED
        ``players`` list was used to build the result, so the leaderboard
        was not actually ordered.
        """
        players = self.player_rpc.get_players()
        sorted_players = sorted(players,
                                key=lambda player: player.score,
                                reverse=True)
        return [(p.name, p.score) for p in sorted_players]

    @rpc
    def update_player_score(self, player_id, score):
        """Fetch the player by id and add ``score`` to their total."""
        player = self.player_rpc.get_player(player_id)
        player.add_score(score)
|
Add method to update a player's score
|
Add method to update a player's score
|
Python
|
mit
|
radekj/poke-battle,skooda/poke-battle
|
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
name = 'score_service'
- player_service = RpcProxy('players_service')
+ player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
- players = self.player_service.get_players()
+ players = self.player_rpc.get_players()
- return sorted(players, key=lambda player: player.score, reverse=True)
+ sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
+ return [(p.name, p.score) for p in players]
+ @rpc
+ def update_player_score(self, player_id, score):
+ player = self.player_rpc.get_player(player_id)
+ player.add_score(score)
+
|
Add method to update a player's score
|
## Code Before:
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
name = 'score_service'
player_service = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_service.get_players()
return sorted(players, key=lambda player: player.score, reverse=True)
## Instruction:
Add method to update a player's score
## Code After:
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@rpc
def update_player_score(self, player_id, score):
player = self.player_rpc.get_player(player_id)
player.add_score(score)
|
from nameko.rpc import rpc, RpcProxy
class ScoreService(object):
name = 'score_service'
- player_service = RpcProxy('players_service')
? -- ^^ -
+ player_rpc = RpcProxy('players_service')
? ^
@rpc
def leaderboard(self):
- players = self.player_service.get_players()
? -- ^^ -
+ players = self.player_rpc.get_players()
? ^
- return sorted(players, key=lambda player: player.score, reverse=True)
? ^^ ^
+ sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
? ++ + ^^^^^^^ ^^^
+ return [(p.name, p.score) for p in players]
+
+ @rpc
+ def update_player_score(self, player_id, score):
+ player = self.player_rpc.get_player(player_id)
+ player.add_score(score)
|
8a4819daa627f06e1a0eac87ab44176b7e2a0115
|
openerp/addons/openupgrade_records/lib/apriori.py
|
openerp/addons/openupgrade_records/lib/apriori.py
|
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
|
# Mapping of pre-migration module names to their post-migration names
# (old name -> new name).
renamed_modules = {
    'base_calendar': 'calendar',
    'mrp_jit': 'procurement_jit',
    'project_mrp': 'sale_service',
    # OCA/account-invoicing
    'invoice_validation_wkfl': 'account_invoice_validation_workflow',
    'account_invoice_zero': 'account_invoice_zero_autopay',
    # OCA/server-tools
    'audittrail': 'auditlog',
    # OCA/bank-statement-import
    'account_banking': 'account_bank_statement_import',
    'account_banking_camt': 'account_bank_statement_import_camt',
    'account_banking_mt940':
    'account_bank_statement_import_mt940_base',
    'account_banking_nl_ing_mt940':
    'account_bank_statement_import_mt940_nl_ing',
    'account_banking_nl_rabo_mt940':
    'account_bank_statement_import_mt940_nl_rabo',
}

# Mapping of renamed models (old -> new); empty for this migration.
renamed_models = {
}
|
Correct renamed module names for bank-statement-import repository.
|
[FIX] Correct renamed module names for bank-statement-import repository.
|
Python
|
agpl-3.0
|
OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade
|
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
- 'account_banking_camt': 'bank_statement_parse_camt',
+ 'account_banking_camt': 'account_bank_statement_import_camt',
- 'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
- 'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
+ 'account_banking_mt940':
+ 'account_bank_statement_import_mt940_base',
+ 'account_banking_nl_ing_mt940':
+ 'account_bank_statement_import_mt940_nl_ing',
+ 'account_banking_nl_rabo_mt940':
+ 'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
Correct renamed module names for bank-statement-import repository.
|
## Code Before:
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
## Instruction:
Correct renamed module names for bank-statement-import repository.
## Code After:
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
- 'account_banking_camt': 'bank_statement_parse_camt',
? ^ ^^
+ 'account_banking_camt': 'account_bank_statement_import_camt',
? ++++++++ ++ ^ ^
- 'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
- 'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
+ 'account_banking_mt940':
+ 'account_bank_statement_import_mt940_base',
+ 'account_banking_nl_ing_mt940':
+ 'account_bank_statement_import_mt940_nl_ing',
+ 'account_banking_nl_rabo_mt940':
+ 'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
74728ef66fd13bfd7ad01f930114c2375e752d13
|
examples/skel.py
|
examples/skel.py
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
print spyral.widgets
spyral.widgets.register('Testing', 'a')
print spyral.widgets.Testing(1,2,3)
print spyral.widgets.TextInputWidget
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
Remove some accidentally committed code.
|
Remove some accidentally committed code.
|
Python
|
lgpl-2.1
|
platipy/spyral
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
- print spyral.widgets
- spyral.widgets.register('Testing', 'a')
- print spyral.widgets.Testing(1,2,3)
- print spyral.widgets.TextInputWidget
-
-
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
Remove some accidentally committed code.
|
## Code Before:
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
print spyral.widgets
spyral.widgets.register('Testing', 'a')
print spyral.widgets.Testing(1,2,3)
print spyral.widgets.TextInputWidget
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
## Instruction:
Remove some accidentally committed code.
## Code After:
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
try:
import _path
except NameError:
pass
import pygame
import spyral
import sys
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Game(spyral.Scene):
"""
A Scene represents a distinct state of your game. They could be menus,
different subgames, or any other things which are mostly distinct.
A Scene should define two methods, update and render.
"""
def __init__(self):
"""
The __init__ message for a scene should set up the camera(s) for the
scene, and other structures which are needed for the scene
"""
spyral.Scene.__init__(self, SIZE)
self.register("system.quit", sys.exit)
- print spyral.widgets
- spyral.widgets.register('Testing', 'a')
- print spyral.widgets.Testing(1,2,3)
- print spyral.widgets.TextInputWidget
-
-
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
|
c23570f528b3ad27e256ea402fc02231b528b000
|
django_messages/urls.py
|
django_messages/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
url(r'^$', RedirectView.as_view(url='inbox/'), name='messages_redirect'),
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
|
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
url(r'^$', RedirectView.as_view(permanent=True, url='inbox/'), name='messages_redirect'),
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
|
Set an explicit value because Default value of 'RedirectView.permanent' will change from True to False in Django 1.9.
|
Set an explicit value because Default value of 'RedirectView.permanent' will change from True to False in Django 1.9.
|
Python
|
bsd-3-clause
|
arneb/django-messages,JordanReiter/django-messages,brajeshvit/Messagemodule,nikhil-above/django-messages,tobiasgoecke/django-messages,brajeshvit/Messagemodule,JordanReiter/django-messages,gustavoam/django-messages,tobiasgoecke/django-messages,procrasti/django-messages,Chris7/django-messages,gustavoam/django-messages,nikhil-above/django-messages,arneb/django-messages,procrasti/django-messages,Chris7/django-messages
|
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
- url(r'^$', RedirectView.as_view(url='inbox/'), name='messages_redirect'),
+ url(r'^$', RedirectView.as_view(permanent=True, url='inbox/'), name='messages_redirect'),
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
|
Set an explicit value because Default value of 'RedirectView.permanent' will change from True to False in Django 1.9.
|
## Code Before:
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
url(r'^$', RedirectView.as_view(url='inbox/'), name='messages_redirect'),
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
## Instruction:
Set an explicit value because Default value of 'RedirectView.permanent' will change from True to False in Django 1.9.
## Code After:
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
url(r'^$', RedirectView.as_view(permanent=True, url='inbox/'), name='messages_redirect'),
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
|
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django_messages.views import *
urlpatterns = patterns('',
- url(r'^$', RedirectView.as_view(url='inbox/'), name='messages_redirect'),
+ url(r'^$', RedirectView.as_view(permanent=True, url='inbox/'), name='messages_redirect'),
? ++++++++++++++++
url(r'^inbox/$', inbox, name='messages_inbox'),
url(r'^outbox/$', outbox, name='messages_outbox'),
url(r'^compose/$', compose, name='messages_compose'),
url(r'^compose/(?P<recipient>[\w.@+-]+)/$', compose, name='messages_compose_to'),
url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
url(r'^trash/$', trash, name='messages_trash'),
)
|
d6759d0abec637753d93cd407fad5e7abc6ec86d
|
astropy/tests/plugins/display.py
|
astropy/tests/plugins/display.py
|
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy documentation for information on '
'migrating to using pytest-astropy to customize the pytest '
'header.', AstropyDeprecationWarning)
|
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy-header documentation for information on '
'migrating to using pytest-astropy-header to customize the '
'pytest header.', AstropyDeprecationWarning)
|
Fix typo in deprecation warning
|
TST: Fix typo in deprecation warning [ci skip]
|
Python
|
bsd-3-clause
|
stargaser/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,lpsinger/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,larrybradley/astropy,lpsinger/astropy,MSeifert04/astropy,astropy/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy,larrybradley/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,mhvk/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,dhomeier/astropy,stargaser/astropy,pllim/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,pllim/astropy,larrybradley/astropy,pllim/astropy,pllim/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,stargaser/astropy,MSeifert04/astropy,stargaser/astropy
|
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
- from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
+ from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
+ TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
- 'See the pytest-astropy documentation for information on '
+ 'See the pytest-astropy-header documentation for information on '
- 'migrating to using pytest-astropy to customize the pytest '
+ 'migrating to using pytest-astropy-header to customize the '
- 'header.', AstropyDeprecationWarning)
+ 'pytest header.', AstropyDeprecationWarning)
|
Fix typo in deprecation warning
|
## Code Before:
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy documentation for information on '
'migrating to using pytest-astropy to customize the pytest '
'header.', AstropyDeprecationWarning)
## Instruction:
Fix typo in deprecation warning
## Code After:
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy-header documentation for information on '
'migrating to using pytest-astropy-header to customize the '
'pytest header.', AstropyDeprecationWarning)
|
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
- from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
? ----------------
+ from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
? +
+ TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
- 'See the pytest-astropy documentation for information on '
+ 'See the pytest-astropy-header documentation for information on '
? +++++++
- 'migrating to using pytest-astropy to customize the pytest '
? -------
+ 'migrating to using pytest-astropy-header to customize the '
? +++++++
- 'header.', AstropyDeprecationWarning)
+ 'pytest header.', AstropyDeprecationWarning)
? +++++++
|
44faefd4bd0bfa3dede8686903759a033c1072d6
|
flask_simple_serializer/response.py
|
flask_simple_serializer/response.py
|
import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
Replace json for flask.json to manage the Response
|
Replace json for flask.json to manage the Response
|
Python
|
mit
|
marcosschroh/Flask-Simple-Serializer
|
- import json
-
from flask import Response as SimpleResponse
+ from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
Replace json for flask.json to manage the Response
|
## Code Before:
import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
## Instruction:
Replace json for flask.json to manage the Response
## Code After:
from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
- import json
-
from flask import Response as SimpleResponse
+ from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
884071638140d4f351fde68e81117ce95f418557
|
tetrahydra/tests/test_core.py
|
tetrahydra/tests/test_core.py
|
"""Test core functions."""
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
|
"""Test core functions."""
import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
|
Revert prev commit in this file.
|
Revert prev commit in this file.
|
Python
|
bsd-3-clause
|
ofgulban/tetrahydra
|
"""Test core functions."""
+ import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
|
Revert prev commit in this file.
|
## Code Before:
"""Test core functions."""
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
## Instruction:
Revert prev commit in this file.
## Code After:
"""Test core functions."""
import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
|
"""Test core functions."""
+ import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
# Given
data = np.random.random([2, 3])
expected = np.ones(2)
# When
output = np.sum(closure(data), axis=1)
# Then
assert output == pytest.approx(expected)
def test_perturb():
"""Test perturbation operator."""
# Given
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
expected = data * p_vals
# When
output = perturb(data, p_vals, reclose=False)
# Then
assert np.all(output == expected)
def test_power():
"""Test powering operator."""
# Given
data = np.random.random([2, 3])
expected = data**np.pi
# When
output = power(data, np.pi, reclose=False)
# Then
assert np.all(output == expected)
|
cf550ac3a00531f2f964fbbb7e27c37071983d26
|
utils/aiohttp_wrap.py
|
utils/aiohttp_wrap.py
|
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.
|
Python
|
mit
|
Naught0/qtbot
|
import aiohttp
- async def aio_get(url: str):
+ async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
- <<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
- =======
- async with session.get(url) as r:
- if r.status == 200:
- return r
- else:
- return None
- >>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
|
## Code Before:
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
## Instruction:
Revert "Revert "progress on DDG cog & aiohttp wrapper""
## Code After:
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
import aiohttp
- async def aio_get(url: str):
+ async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
- <<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
- =======
- async with session.get(url) as r:
- if r.status == 200:
- return r
- else:
- return None
- >>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
f55a00cfd81f8f3c88aaaa5a4b3d63ceb4364a11
|
books/views.py
|
books/views.py
|
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
page = Book.objects.get(slug=slug)
return redirect('/api/v2/pages/{}'.format(page.pk))
|
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
try:
page = Book.objects.get(slug=slug)
return redirect('/api/v2/pages/{}'.format(page.pk))
except Book.DoesNotExist:
#no book, return to book index
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
|
Return book index page if book not found by slug
|
Return book index page if book not found by slug
|
Python
|
agpl-3.0
|
openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms
|
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
+ try:
- page = Book.objects.get(slug=slug)
+ page = Book.objects.get(slug=slug)
- return redirect('/api/v2/pages/{}'.format(page.pk))
+ return redirect('/api/v2/pages/{}'.format(page.pk))
+ except Book.DoesNotExist:
+ #no book, return to book index
+ page = BookIndex.objects.all()[0]
+ return redirect('/api/v2/pages/{}'.format(page.pk))
|
Return book index page if book not found by slug
|
## Code Before:
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
page = Book.objects.get(slug=slug)
return redirect('/api/v2/pages/{}'.format(page.pk))
## Instruction:
Return book index page if book not found by slug
## Code After:
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
try:
page = Book.objects.get(slug=slug)
return redirect('/api/v2/pages/{}'.format(page.pk))
except Book.DoesNotExist:
#no book, return to book index
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
|
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from .models import BookIndex, Book
@csrf_exempt
def book_index(request):
page = BookIndex.objects.all()[0]
return redirect('/api/v2/pages/{}'.format(page.pk))
@csrf_exempt
def book_detail(request, slug):
+ try:
- page = Book.objects.get(slug=slug)
+ page = Book.objects.get(slug=slug)
? ++++
- return redirect('/api/v2/pages/{}'.format(page.pk))
+ return redirect('/api/v2/pages/{}'.format(page.pk))
? ++++
+ except Book.DoesNotExist:
+ #no book, return to book index
+ page = BookIndex.objects.all()[0]
+ return redirect('/api/v2/pages/{}'.format(page.pk))
|
ed97a1f811f04693203f6d1c0e9b64649a3da152
|
coney/exceptions.py
|
coney/exceptions.py
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
|
Add a new exception to handle a non-callable handler.
|
Add a new exception to handle a non-callable handler.
|
Python
|
mit
|
cbigler/jackrabbit
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
+
+ class HandlerNotCallableException(ConeyException):
+ def __repr__(self):
+ return 'Handler provided a non-callable object'
+
|
Add a new exception to handle a non-callable handler.
|
## Code Before:
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
## Instruction:
Add a new exception to handle a non-callable handler.
## Code After:
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
+
+
+ class HandlerNotCallableException(ConeyException):
+ def __repr__(self):
+ return 'Handler provided a non-callable object'
|
18ec52a1c34e263e4d909fc1ee19500f9adac26b
|
examples/django_example/example/app/models.py
|
examples/django_example/example/app/models.py
|
from django.db import models
# Create your models here.
|
from django.contrib.auth.models import AbstractUser, UserManager
class CustomUser(AbstractUser):
objects = UserManager()
|
Define a custom user model
|
Define a custom user model
|
Python
|
bsd-3-clause
|
S01780/python-social-auth,tobias47n9e/social-core,falcon1kr/python-social-auth,ByteInternet/python-social-auth,muhammad-ammar/python-social-auth,contracode/python-social-auth,S01780/python-social-auth,clef/python-social-auth,lawrence34/python-social-auth,python-social-auth/social-storage-sqlalchemy,fearlessspider/python-social-auth,MSOpenTech/python-social-auth,Andygmb/python-social-auth,mrwags/python-social-auth,ariestiyansyah/python-social-auth,clef/python-social-auth,bjorand/python-social-auth,cjltsod/python-social-auth,barseghyanartur/python-social-auth,nirmalvp/python-social-auth,Andygmb/python-social-auth,garrett-schlesinger/python-social-auth,henocdz/python-social-auth,VishvajitP/python-social-auth,duoduo369/python-social-auth,merutak/python-social-auth,drxos/python-social-auth,firstjob/python-social-auth,webjunkie/python-social-auth,DhiaEddineSaidi/python-social-auth,python-social-auth/social-app-django,barseghyanartur/python-social-auth,rsteca/python-social-auth,jneves/python-social-auth,mrwags/python-social-auth,mrwags/python-social-auth,frankier/python-social-auth,JJediny/python-social-auth,joelstanner/python-social-auth,lamby/python-social-auth,bjorand/python-social-auth,python-social-auth/social-core,chandolia/python-social-auth,jeyraof/python-social-auth,cmichal/python-social-auth,falcon1kr/python-social-auth,robbiet480/python-social-auth,contracode/python-social-auth,lawrence34/python-social-auth,yprez/python-social-auth,bjorand/python-social-auth,garrett-schlesinger/python-social-auth,clef/python-social-auth,python-social-auth/social-app-django,jameslittle/python-social-auth,tkajtoch/python-social-auth,python-social-auth/social-app-django,JerzySpendel/python-social-auth,muhammad-ammar/python-social-auth,msampathkumar/python-social-auth,webjunkie/python-social-auth,mark-adams/python-social-auth,iruga090/python-social-auth,contracode/python-social-auth,JJediny/python-social-auth,lamby/python-social-auth,cmichal/python-social-auth,alrusdi/python-social-
auth,python-social-auth/social-docs,yprez/python-social-auth,san-mate/python-social-auth,jeyraof/python-social-auth,ononeor12/python-social-auth,jneves/python-social-auth,lawrence34/python-social-auth,DhiaEddineSaidi/python-social-auth,python-social-auth/social-app-cherrypy,michael-borisov/python-social-auth,SeanHayes/python-social-auth,lneoe/python-social-auth,joelstanner/python-social-auth,duoduo369/python-social-auth,lneoe/python-social-auth,fearlessspider/python-social-auth,chandolia/python-social-auth,hsr-ba-fs15-dat/python-social-auth,daniula/python-social-auth,VishvajitP/python-social-auth,daniula/python-social-auth,alrusdi/python-social-auth,mark-adams/python-social-auth,barseghyanartur/python-social-auth,rsalmaso/python-social-auth,mathspace/python-social-auth,JJediny/python-social-auth,michael-borisov/python-social-auth,san-mate/python-social-auth,lneoe/python-social-auth,jameslittle/python-social-auth,rsteca/python-social-auth,henocdz/python-social-auth,S01780/python-social-auth,tkajtoch/python-social-auth,tutumcloud/python-social-auth,michael-borisov/python-social-auth,JerzySpendel/python-social-auth,degs098/python-social-auth,robbiet480/python-social-auth,rsalmaso/python-social-auth,nirmalvp/python-social-auth,falcon1kr/python-social-auth,python-social-auth/social-core,ariestiyansyah/python-social-auth,ariestiyansyah/python-social-auth,ByteInternet/python-social-auth,hsr-ba-fs15-dat/python-social-auth,nirmalvp/python-social-auth,DhiaEddineSaidi/python-social-auth,joelstanner/python-social-auth,ononeor12/python-social-auth,wildtetris/python-social-auth,henocdz/python-social-auth,mathspace/python-social-auth,MSOpenTech/python-social-auth,wildtetris/python-social-auth,degs098/python-social-auth,noodle-learns-programming/python-social-auth,SeanHayes/python-social-auth,mchdks/python-social-auth,lamby/python-social-auth,merutak/python-social-auth,jneves/python-social-auth,chandolia/python-social-auth,mchdks/python-social-auth,webjunkie/python-social-auth,Byte
Internet/python-social-auth,fearlessspider/python-social-auth,firstjob/python-social-auth,noodle-learns-programming/python-social-auth,VishvajitP/python-social-auth,daniula/python-social-auth,mark-adams/python-social-auth,tkajtoch/python-social-auth,san-mate/python-social-auth,jeyraof/python-social-auth,robbiet480/python-social-auth,wildtetris/python-social-auth,jameslittle/python-social-auth,msampathkumar/python-social-auth,alrusdi/python-social-auth,msampathkumar/python-social-auth,yprez/python-social-auth,firstjob/python-social-auth,ononeor12/python-social-auth,tutumcloud/python-social-auth,noodle-learns-programming/python-social-auth,mathspace/python-social-auth,hsr-ba-fs15-dat/python-social-auth,muhammad-ammar/python-social-auth,degs098/python-social-auth,rsteca/python-social-auth,JerzySpendel/python-social-auth,frankier/python-social-auth,mchdks/python-social-auth,Andygmb/python-social-auth,iruga090/python-social-auth,merutak/python-social-auth,MSOpenTech/python-social-auth,cjltsod/python-social-auth,cmichal/python-social-auth,drxos/python-social-auth,drxos/python-social-auth,iruga090/python-social-auth
|
- from django.db import models
+ from django.contrib.auth.models import AbstractUser, UserManager
- # Create your models here.
+ class CustomUser(AbstractUser):
+ objects = UserManager()
+
|
Define a custom user model
|
## Code Before:
from django.db import models
# Create your models here.
## Instruction:
Define a custom user model
## Code After:
from django.contrib.auth.models import AbstractUser, UserManager
class CustomUser(AbstractUser):
objects = UserManager()
|
- from django.db import models
+ from django.contrib.auth.models import AbstractUser, UserManager
- # Create your models here.
+
+ class CustomUser(AbstractUser):
+ objects = UserManager()
|
c10afc4ebd4d7ec8571c0685c0d87f76b25b3af9
|
scipy/special/_precompute/utils.py
|
scipy/special/_precompute/utils.py
|
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = map(lambda x: mp.mpf(x), b)
return b
|
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = [mp.mpf(x) for x in b]
return b
|
Use list comprehension instead of lambda function
|
Use list comprehension instead of lambda function
|
Python
|
bsd-3-clause
|
grlee77/scipy,WarrenWeckesser/scipy,vigna/scipy,endolith/scipy,andyfaff/scipy,rgommers/scipy,scipy/scipy,grlee77/scipy,mdhaber/scipy,Stefan-Endres/scipy,zerothi/scipy,rgommers/scipy,andyfaff/scipy,scipy/scipy,zerothi/scipy,tylerjereddy/scipy,endolith/scipy,mdhaber/scipy,endolith/scipy,rgommers/scipy,mdhaber/scipy,endolith/scipy,e-q/scipy,WarrenWeckesser/scipy,ilayn/scipy,matthew-brett/scipy,anntzer/scipy,scipy/scipy,tylerjereddy/scipy,Eric89GXL/scipy,vigna/scipy,WarrenWeckesser/scipy,perimosocordiae/scipy,andyfaff/scipy,perimosocordiae/scipy,e-q/scipy,andyfaff/scipy,WarrenWeckesser/scipy,tylerjereddy/scipy,grlee77/scipy,tylerjereddy/scipy,anntzer/scipy,matthew-brett/scipy,tylerjereddy/scipy,rgommers/scipy,WarrenWeckesser/scipy,vigna/scipy,matthew-brett/scipy,zerothi/scipy,Stefan-Endres/scipy,endolith/scipy,vigna/scipy,ilayn/scipy,ilayn/scipy,zerothi/scipy,mdhaber/scipy,perimosocordiae/scipy,WarrenWeckesser/scipy,anntzer/scipy,zerothi/scipy,perimosocordiae/scipy,zerothi/scipy,andyfaff/scipy,scipy/scipy,vigna/scipy,anntzer/scipy,e-q/scipy,scipy/scipy,perimosocordiae/scipy,matthew-brett/scipy,Eric89GXL/scipy,grlee77/scipy,mdhaber/scipy,anntzer/scipy,ilayn/scipy,ilayn/scipy,matthew-brett/scipy,anntzer/scipy,grlee77/scipy,Stefan-Endres/scipy,e-q/scipy,ilayn/scipy,scipy/scipy,Eric89GXL/scipy,mdhaber/scipy,endolith/scipy,Eric89GXL/scipy,rgommers/scipy,Eric89GXL/scipy,andyfaff/scipy,Eric89GXL/scipy,Stefan-Endres/scipy,Stefan-Endres/scipy,Stefan-Endres/scipy,perimosocordiae/scipy,e-q/scipy
|
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
- b = map(lambda x: mp.mpf(x), b)
+ b = [mp.mpf(x) for x in b]
return b
|
Use list comprehension instead of lambda function
|
## Code Before:
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = map(lambda x: mp.mpf(x), b)
return b
## Instruction:
Use list comprehension instead of lambda function
## Code After:
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = [mp.mpf(x) for x in b]
return b
|
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
- b = map(lambda x: mp.mpf(x), b)
+ b = [mp.mpf(x) for x in b]
return b
|
2abf0e6b9009abd7c34b459ad9e3f2c6223bb043
|
polyaxon/db/getters/experiment_groups.py
|
polyaxon/db/getters/experiment_groups.py
|
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
if not experiment_group.is_running:
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
|
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
if not experiment_group or not experiment_group.is_running:
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
|
Add condition to check if experiment group exists before checking status
|
Add condition to check if experiment group exists before checking status
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
- if not experiment_group.is_running:
+ if not experiment_group or not experiment_group.is_running:
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
|
Add condition to check if experiment group exists before checking status
|
## Code Before:
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
if not experiment_group.is_running:
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
## Instruction:
Add condition to check if experiment group exists before checking status
## Code After:
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
if not experiment_group or not experiment_group.is_running:
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
|
import logging
from db.models.experiment_groups import ExperimentGroup
_logger = logging.getLogger('polyaxon.db')
def get_valid_experiment_group(experiment_group_id):
try:
return ExperimentGroup.objects.get(id=experiment_group_id)
except ExperimentGroup.DoesNotExist:
_logger.info('ExperimentGroup `%s` was not found.', experiment_group_id)
return None
def get_running_experiment_group(experiment_group_id):
experiment_group = get_valid_experiment_group(experiment_group_id=experiment_group_id)
- if not experiment_group.is_running:
+ if not experiment_group or not experiment_group.is_running:
? ++++++++++++++++++++++++
_logger.info('ExperimentGroup `%s` is not running.', experiment_group_id)
return None
return experiment_group
|
b57a2e184e3861617c12801529295b0095257cd9
|
petition/resources.py
|
petition/resources.py
|
from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
|
from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
Fix swappable model in signature export
|
Fix swappable model in signature export
|
Python
|
mit
|
watchdogpolska/django-one-petition,watchdogpolska/django-one-petition,watchdogpolska/django-one-petition
|
from import_export import resources
- from .models import Signature
+ import swapper
+
+ Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
- exclude = ('created_on', 'modified_on')
model = Signature
|
Fix swappable model in signature export
|
## Code Before:
from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
## Instruction:
Fix swappable model in signature export
## Code After:
from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
from import_export import resources
- from .models import Signature
+ import swapper
+
+ Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
- exclude = ('created_on', 'modified_on')
model = Signature
|
ba34ea366d8ee9ac47f1bb3044ad04dcd482c6eb
|
cybox/test/objects/win_mailslot_test.py
|
cybox/test/objects/win_mailslot_test.py
|
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
'handle': [
{
'name': "First Mailslot Handle",
'type': "Mailslot",
'xsi:type': "WindowsHandleObjectType",
},
{
'name': "Second Mailslot Handle",
'xsi:type': "WindowsHandleObjectType",
},
],
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
'handle': {
'name': "First Mailslot Handle",
'type': "Mailslot",
'xsi:type': "WindowsHandleObjectType",
},
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
Fix WinMailslot object to only use a single handle rather than a list
|
Fix WinMailslot object to only use a single handle rather than a list
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
- 'handle': [
+ 'handle': {
- {
- 'name': "First Mailslot Handle",
+ 'name': "First Mailslot Handle",
- 'type': "Mailslot",
+ 'type': "Mailslot",
- 'xsi:type': "WindowsHandleObjectType",
+ 'xsi:type': "WindowsHandleObjectType",
- },
- {
- 'name': "Second Mailslot Handle",
- 'xsi:type': "WindowsHandleObjectType",
- },
- ],
+ },
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
Fix WinMailslot object to only use a single handle rather than a list
|
## Code Before:
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
'handle': [
{
'name': "First Mailslot Handle",
'type': "Mailslot",
'xsi:type': "WindowsHandleObjectType",
},
{
'name': "Second Mailslot Handle",
'xsi:type': "WindowsHandleObjectType",
},
],
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
## Instruction:
Fix WinMailslot object to only use a single handle rather than a list
## Code After:
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
'handle': {
'name': "First Mailslot Handle",
'type': "Mailslot",
'xsi:type': "WindowsHandleObjectType",
},
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
import unittest
from cybox.objects.win_mailslot_object import WinMailslot
from cybox.test.objects import ObjectTestCase
class TestWinMailslot(ObjectTestCase, unittest.TestCase):
object_type = "WindowsMailslotObjectType"
klass = WinMailslot
_full_dict = {
- 'handle': [
? ^
+ 'handle': {
? ^
- {
- 'name': "First Mailslot Handle",
? ----
+ 'name': "First Mailslot Handle",
- 'type': "Mailslot",
? ----
+ 'type': "Mailslot",
- 'xsi:type': "WindowsHandleObjectType",
? ----
+ 'xsi:type': "WindowsHandleObjectType",
- },
- {
- 'name': "Second Mailslot Handle",
- 'xsi:type': "WindowsHandleObjectType",
- },
- ],
? ^
+ },
? ^
'max_message_size': 1024,
'name': "My Mailslot",
'read_timeout': 2000,
'security_attributes': "SecAttributes",
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
56f27099a8f7be39a6d8848a9378af6ed48f528f
|
bongo/apps/frontend/tests/templatetags_tests.py
|
bongo/apps/frontend/tests/templatetags_tests.py
|
from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
Fix broken test (not the intermittent one, this was just a dumb thing)
|
Fix broken test (not the intermittent one, this was just a dumb thing)
|
Python
|
mit
|
BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo
|
from django.test import TestCase
from django.conf import settings
+ from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
- self.assertEqual(rendered, article.__dict__)
+ self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
Fix broken test (not the intermittent one, this was just a dumb thing)
|
## Code Before:
from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
## Instruction:
Fix broken test (not the intermittent one, this was just a dumb thing)
## Code After:
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
from django.test import TestCase
from django.conf import settings
+ from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
- self.assertEqual(rendered, article.__dict__)
+ self.assertEqual(rendered, escape(article.__dict__))
? +++++++ +
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
83eef98bd8cf36e62718c60f2bba71337a9a9ea0
|
kolibri/plugins/coach/kolibri_plugin.py
|
kolibri/plugins/coach/kolibri_plugin.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
Remove undefined import in coach plugin.
|
Remove undefined import in coach plugin.
|
Python
|
mit
|
indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
- from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
-
- class CoachInclusionHook(hooks.CoachSyncHook):
- bundle_class = CoachAsset
-
|
Remove undefined import in coach plugin.
|
## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
## Instruction:
Remove undefined import in coach plugin.
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
- from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
-
-
- class CoachInclusionHook(hooks.CoachSyncHook):
- bundle_class = CoachAsset
|
a30e51ccb74bc55924be6f7f79dc4b6038c9b457
|
altair/examples/bar_chart_with_highlighted_segment.py
|
altair/examples/bar_chart_with_highlighted_segment.py
|
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
Move bar chart with highlighted segment chart into the bar charts section
|
Move bar chart with highlighted segment chart into the bar charts section
|
Python
|
bsd-3-clause
|
altair-viz/altair
|
+ # category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
Move bar chart with highlighted segment chart into the bar charts section
|
## Code Before:
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
## Instruction:
Move bar chart with highlighted segment chart into the bar charts section
## Code After:
# category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
+ # category: bar charts
import altair as alt
import pandas as pd
from vega_datasets import data
source = data.wheat()
threshold = pd.DataFrame([{"threshold": 90}])
bars = alt.Chart(source).mark_bar().encode(
x="year:O",
y="wheat:Q",
)
highlight = alt.Chart(source).mark_bar(color="#e45755").encode(
x='year:O',
y='baseline:Q',
y2='wheat:Q'
).transform_filter(
alt.datum.wheat > 90
).transform_calculate("baseline", "90")
rule = alt.Chart(threshold).mark_rule().encode(
y='threshold:Q'
)
(bars + highlight + rule).properties(width=600)
|
9261db252969c69ede633d4a4c02bb87c7bc1434
|
quilt/__init__.py
|
quilt/__init__.py
|
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
|
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
|
Add docstring for main module
|
Add docstring for main module
|
Python
|
mit
|
bjoernricks/python-quilt,vadmium/python-quilt
|
+
+ """ A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
+
|
Add docstring for main module
|
## Code Before:
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
## Instruction:
Add docstring for main module
## Code After:
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
|
+
+ """ A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
+
|
216a9176ecf395a7461c6f8ec926d48fa1634bad
|
manager/__init__.py
|
manager/__init__.py
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootstrap/dist/css/bootstrap.css',
'bootstrap/dist/css/bootstrap-theme.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootswatch/sandstone/bootstrap.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
Change theme to sandstone (bootswatch)
|
Change theme to sandstone (bootswatch)
|
Python
|
mit
|
hreeder/ignition,hreeder/ignition,hreeder/ignition
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
- 'bootstrap/dist/css/bootstrap.css',
+ 'bootswatch/sandstone/bootstrap.css',
- 'bootstrap/dist/css/bootstrap-theme.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
Change theme to sandstone (bootswatch)
|
## Code Before:
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootstrap/dist/css/bootstrap.css',
'bootstrap/dist/css/bootstrap-theme.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
## Instruction:
Change theme to sandstone (bootswatch)
## Code After:
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
'bootswatch/sandstone/bootstrap.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
import os
from flask import Flask
from flask.ext.assets import Bundle, Environment
app = Flask(__name__)
# Load the app config
app.config.from_object("config.Config")
assets = Environment(app)
assets.load_path = [
os.path.join(os.path.dirname(__file__), 'static'),
os.path.join(os.path.dirname(__file__), 'static', 'bower_components')
]
assets.register(
'js_all',
Bundle(
'jquery/dist/jquery.min.js',
'bootstrap/dist/js/bootstrap.min.js',
output='js_all.js'
)
)
assets.register(
'css_all',
Bundle(
- 'bootstrap/dist/css/bootstrap.css',
? ^ ^^ - ^^^^
+ 'bootswatch/sandstone/bootstrap.css',
? ++ ^^^^ ^ ^^^
- 'bootstrap/dist/css/bootstrap-theme.css',
'css/ignition.css',
output='css_all.css'
)
)
from manager.views import core
|
cca6a727063c63d78d61ee81c892811238139462
|
lame_test.py
|
lame_test.py
|
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print attr
try:
f()
except TypeError:
f("query")
|
import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
args_map = {
'announcements_by_week': dates,
'menu_by_week': dates,
'notes_by_week': dates,
'course': ('CS', '486'),
'course_examschedule': ('CS', '486'),
'course_prerequistes': ('CS', '486'),
'course_schedule': ('CS', '486'),
'course_by_building_room': ('MC', '2038'),
'term_course_schedule': ('1141', 'CS', '486'),
'term_subject_schedule': ('1141', 'CS'),
}
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print(attr)
try:
f()
except TypeError:
try:
args = ("query",)
if attr in args_map:
args = args_map[attr]
f(*args)
except Exception as e:
print(e.message)
except Exception as e:
print(e.message)
|
Update lame test to test on multiple parameters.
|
Update lame test to test on multiple parameters.
|
Python
|
mit
|
albertoconnor/uwaterlooapi
|
+ import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
+
+ dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
+
+
+ args_map = {
+ 'announcements_by_week': dates,
+ 'menu_by_week': dates,
+ 'notes_by_week': dates,
+ 'course': ('CS', '486'),
+ 'course_examschedule': ('CS', '486'),
+ 'course_prerequistes': ('CS', '486'),
+ 'course_schedule': ('CS', '486'),
+ 'course_by_building_room': ('MC', '2038'),
+ 'term_course_schedule': ('1141', 'CS', '486'),
+ 'term_subject_schedule': ('1141', 'CS'),
+ }
+
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
- print attr
+ print(attr)
try:
f()
except TypeError:
- f("query")
+ try:
+ args = ("query",)
+ if attr in args_map:
+ args = args_map[attr]
+ f(*args)
+ except Exception as e:
+ print(e.message)
+ except Exception as e:
+ print(e.message)
|
Update lame test to test on multiple parameters.
|
## Code Before:
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print attr
try:
f()
except TypeError:
f("query")
## Instruction:
Update lame test to test on multiple parameters.
## Code After:
import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
args_map = {
'announcements_by_week': dates,
'menu_by_week': dates,
'notes_by_week': dates,
'course': ('CS', '486'),
'course_examschedule': ('CS', '486'),
'course_prerequistes': ('CS', '486'),
'course_schedule': ('CS', '486'),
'course_by_building_room': ('MC', '2038'),
'term_course_schedule': ('1141', 'CS', '486'),
'term_subject_schedule': ('1141', 'CS'),
}
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print(attr)
try:
f()
except TypeError:
try:
args = ("query",)
if attr in args_map:
args = args_map[attr]
f(*args)
except Exception as e:
print(e.message)
except Exception as e:
print(e.message)
|
+ import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
+
+ dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
+
+
+ args_map = {
+ 'announcements_by_week': dates,
+ 'menu_by_week': dates,
+ 'notes_by_week': dates,
+ 'course': ('CS', '486'),
+ 'course_examschedule': ('CS', '486'),
+ 'course_prerequistes': ('CS', '486'),
+ 'course_schedule': ('CS', '486'),
+ 'course_by_building_room': ('MC', '2038'),
+ 'term_course_schedule': ('1141', 'CS', '486'),
+ 'term_subject_schedule': ('1141', 'CS'),
+ }
+
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
- print attr
? ^
+ print(attr)
? ^ +
try:
f()
except TypeError:
- f("query")
+ try:
+ args = ("query",)
+ if attr in args_map:
+ args = args_map[attr]
+ f(*args)
+ except Exception as e:
+ print(e.message)
+ except Exception as e:
+ print(e.message)
|
58f8f4881a9e97206ddf49ea6cfb7f48dd34bfb3
|
example/urls.py
|
example/urls.py
|
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r"^$", TemplateView.as_view(template_name="homepage.html")),
url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
path('', TemplateView.as_view(template_name="homepage.html")),
re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Python
|
bsd-3-clause
|
bashu/django-fancybox,bashu/django-fancybox
|
- from django.conf.urls import url
+ from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
- url(r"^$", TemplateView.as_view(template_name="homepage.html")),
+ path('', TemplateView.as_view(template_name="homepage.html")),
- url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
+ re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
## Code Before:
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r"^$", TemplateView.as_view(template_name="homepage.html")),
url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
## Instruction:
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
## Code After:
from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
path('', TemplateView.as_view(template_name="homepage.html")),
re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
- from django.conf.urls import url
+ from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
- url(r"^$", TemplateView.as_view(template_name="homepage.html")),
? ^^^ ^^^^^
+ path('', TemplateView.as_view(template_name="homepage.html")),
? ^^^^ ^^
- url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
? - ^
+ re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
? ^^^^^^
]
|
faf35a814d045ce3d71921ed0d4ac268d5a9811c
|
app/notify_client/provider_client.py
|
app/notify_client/provider_client.py
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
Add provider client method to get provider version history
|
Add provider client method to get provider version history
|
Python
|
mit
|
gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
+ def get_provider_versions(self, provider_id):
+ return self.get(
+ url='/provider-details/{}/versions'.format(provider_id)
+ )
+
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
Add provider client method to get provider version history
|
## Code Before:
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
## Instruction:
Add provider client method to get provider version history
## Code After:
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
+ def get_provider_versions(self, provider_id):
+ return self.get(
+ url='/provider-details/{}/versions'.format(provider_id)
+ )
+
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
43e6a2e3bf90f5edee214d1511a6805a67f79595
|
stl/__init__.py
|
stl/__init__.py
|
import stl.ascii
import stl.binary
def parse_ascii_file(file):
return stl.ascii.parse(file)
def parse_binary_file(file):
return stl.binary.parse(file)
def parse_ascii_string(data):
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
def parse_binary_string(data):
from StringIO import StringIO
return parse_binary_file(StringIO(data))
|
import stl.ascii
import stl.binary
def read_ascii_file(file):
return stl.ascii.parse(file)
def read_binary_file(file):
return stl.binary.parse(file)
def read_ascii_string(data):
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
def read_binary_string(data):
from StringIO import StringIO
return parse_binary_file(StringIO(data))
|
Rename the reading functions "read_" rather than "parse_".
|
Rename the reading functions "read_" rather than "parse_".
"Parsing" is what they do internally, but "read" is a better opposite to
"write" and matches the name of the underlying raw file operation.
|
Python
|
mit
|
apparentlymart/python-stl,zachwick/python-stl,ng110/python-stl
|
import stl.ascii
import stl.binary
- def parse_ascii_file(file):
+ def read_ascii_file(file):
return stl.ascii.parse(file)
- def parse_binary_file(file):
+ def read_binary_file(file):
return stl.binary.parse(file)
- def parse_ascii_string(data):
+ def read_ascii_string(data):
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
- def parse_binary_string(data):
+ def read_binary_string(data):
from StringIO import StringIO
return parse_binary_file(StringIO(data))
|
Rename the reading functions "read_" rather than "parse_".
|
## Code Before:
import stl.ascii
import stl.binary
def parse_ascii_file(file):
return stl.ascii.parse(file)
def parse_binary_file(file):
return stl.binary.parse(file)
def parse_ascii_string(data):
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
def parse_binary_string(data):
from StringIO import StringIO
return parse_binary_file(StringIO(data))
## Instruction:
Rename the reading functions "read_" rather than "parse_".
## Code After:
import stl.ascii
import stl.binary
def read_ascii_file(file):
return stl.ascii.parse(file)
def read_binary_file(file):
return stl.binary.parse(file)
def read_ascii_string(data):
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
def read_binary_string(data):
from StringIO import StringIO
return parse_binary_file(StringIO(data))
|
import stl.ascii
import stl.binary
- def parse_ascii_file(file):
? ^ ^^^
+ def read_ascii_file(file):
? ^^ ^
return stl.ascii.parse(file)
- def parse_binary_file(file):
? ^ ^^^
+ def read_binary_file(file):
? ^^ ^
return stl.binary.parse(file)
- def parse_ascii_string(data):
? ^ ^^^
+ def read_ascii_string(data):
? ^^ ^
from StringIO import StringIO
return parse_ascii_file(StringIO(data))
- def parse_binary_string(data):
? ^ ^^^
+ def read_binary_string(data):
? ^^ ^
from StringIO import StringIO
return parse_binary_file(StringIO(data))
|
ccbc40f5bfa160a9e41de86fc4845d68da40b8c4
|
parse.py
|
parse.py
|
import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
|
import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
Update variable names, add better comments, convert to JSON.
|
Update variable names, add better comments, convert to JSON.
|
Python
|
isc
|
reillysiemens/macdb
|
import re
+ import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
- number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
+ oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
- oui_hash = hashlib.sha1()
+ request_hash = hashlib.sha1()
- companies = []
+ organizations = []
# Get the listing from the source location.
- req = requests.get(location)
+ request = requests.get(location)
# Update our hash object with the value from our request string.
- oui_hash.update(bytes(req.text, "utf-8"))
+ request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
- req_string = req.text[127:]
+ request_string = request.text[127:]
# Break the request string into a list of entries.
- entries = req_string.split('\r\n\r\n')
+ entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
+ # For each entry...
for entry in entries:
+
+ # Break the entry into lines.
lines = entry.split('\r\n')
- matches = number_name.search(lines[1])
- company = {'name': matches.group(2), 'oui': matches.group(1)}
- companies.append(company)
+ # Find the id and oui for the organization.
+ matches = oui_id.search(lines[1])
+
+ # Find the address for the organization.
+ address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
+
+ # Create a dictionary for the organization.
+ organization = {'id': matches.group(2),
+ 'oui': matches.group(1),
+ 'address': address}
+
+ # Append that dictionary to our list of organizations.
+ organizations.append(organization)
+
+ # Convert the list of organizations to a JSON formatted string.
+ json_organizations = json.dumps(organizations)
+ print(json_organizations)
+
|
Update variable names, add better comments, convert to JSON.
|
## Code Before:
import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
## Instruction:
Update variable names, add better comments, convert to JSON.
## Code After:
import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
import re
+ import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
- number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
? ^ ^^^^ ^^^^
+ oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
? ^ ^ ^^
- oui_hash = hashlib.sha1()
? ^ ^
+ request_hash = hashlib.sha1()
? ^^^ ^^^
- companies = []
+ organizations = []
# Get the listing from the source location.
- req = requests.get(location)
+ request = requests.get(location)
? ++++
# Update our hash object with the value from our request string.
- oui_hash.update(bytes(req.text, "utf-8"))
? ^ ^
+ request_hash.update(bytes(request.text, "utf-8"))
? ^^^ ^^^ ++++
# Ignore the first 127 characters of junk data.
- req_string = req.text[127:]
+ request_string = request.text[127:]
? ++++ ++++
# Break the request string into a list of entries.
- entries = req_string.split('\r\n\r\n')
+ entries = request_string.split('\r\n\r\n')
? ++++
# Remove junk entry at the end.
del entries[-1]
+ # For each entry...
for entry in entries:
+
+ # Break the entry into lines.
lines = entry.split('\r\n')
+
+ # Find the id and oui for the organization.
- matches = number_name.search(lines[1])
? ^ ^^^^ ^^^^
+ matches = oui_id.search(lines[1])
? ^ ^ ^^
- company = {'name': matches.group(2), 'oui': matches.group(1)}
- companies.append(company)
+
+ # Find the address for the organization.
+ address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
+
+ # Create a dictionary for the organization.
+ organization = {'id': matches.group(2),
+ 'oui': matches.group(1),
+ 'address': address}
+
+ # Append that dictionary to our list of organizations.
+ organizations.append(organization)
+
+ # Convert the list of organizations to a JSON formatted string.
+ json_organizations = json.dumps(organizations)
+ print(json_organizations)
|
2515b3402c671c2949e5f3c712cb284777f2accf
|
examples/boilerplates/base_test_case.py
|
examples/boilerplates/base_test_case.py
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
Update boilerplate to save a screenshot before the tearDown()
|
Update boilerplate to save a screenshot before the tearDown()
|
Python
|
mit
|
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
- # Add custom setUp code for your tests AFTER the super().setUp()
+ # <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
+ self.save_teardown_screenshot()
- # Add custom tearDown code for your tests BEFORE the super().tearDown()
+ # <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
Update boilerplate to save a screenshot before the tearDown()
|
## Code Before:
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
## Instruction:
Update boilerplate to save a screenshot before the tearDown()
## Code After:
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
- # Add custom setUp code for your tests AFTER the super().setUp()
? -----
+ # <<< Add custom setUp code for tests AFTER the super().setUp() >>>
? ++++ ++++
def tearDown(self):
+ self.save_teardown_screenshot()
- # Add custom tearDown code for your tests BEFORE the super().tearDown()
? ---------------
+ # <<< Add custom tearDown code BEFORE the super().tearDown() >>>
? ++++ ++++
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
fa0d138ce465efdd630b83ba4a7ee10888a68b4a
|
alg_factorial.py
|
alg_factorial.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of Fibonacci series, Fn, by recursion."""
pass
def factorial_memo(n):
"""Get the nth number of Fibonacci series, Fn, by memorization."""
pass
def factorial_dp(n):
"""Get the nth number of Fibonacci series by dynamic programming."""
pass
def main():
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of factorial series, Fn, by recursion.
- Time complexity: Fn - 1 = O(Fn); too fast.
- Space complexity: O(n).
"""
if n <= 1:
return 1
else:
return n * factorial_recur(n - 1)
def factorial_memo(n):
"""Get the nth number of factorial series, Fn, by memorization.
- Time complexity: O(n).
- Space complexity: O(n).
"""
fn_d = {}
fn_d[0] = 1
fn_d[1] = 1
for n in xrange(2, n + 1):
fn_d[n] = n * fn_d[n - 1]
return fn_d[n]
def factorial_dp(n):
"""Get the nth number of factorial series by dynamic programming.
- Time complexity is still O(n), like factorial_memo().
- Space complexity is O(1), improving a lot.
"""
fn = 1
for i in xrange(2, n + 1):
fn *= i
return fn
def main():
import time
n = 10
print('{}th number of factorial series:'.format(n))
start_time = time.time()
print('By recursion: {}'.format(factorial_recur(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By memorization: {}'.format(factorial_memo(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By dynamic programming: {}'.format(factorial_dp(n)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido
|
Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
- """Get the nth number of Fibonacci series, Fn, by recursion."""
+ """Get the nth number of factorial series, Fn, by recursion.
- pass
+
+ - Time complexity: Fn - 1 = O(Fn); too fast.
+ - Space complexity: O(n).
+ """
+ if n <= 1:
+ return 1
+ else:
+ return n * factorial_recur(n - 1)
def factorial_memo(n):
- """Get the nth number of Fibonacci series, Fn, by memorization."""
+ """Get the nth number of factorial series, Fn, by memorization.
- pass
-
+
+ - Time complexity: O(n).
+ - Space complexity: O(n).
+ """
+ fn_d = {}
+ fn_d[0] = 1
+ fn_d[1] = 1
+ for n in xrange(2, n + 1):
+ fn_d[n] = n * fn_d[n - 1]
+ return fn_d[n]
def factorial_dp(n):
- """Get the nth number of Fibonacci series by dynamic programming."""
+ """Get the nth number of factorial series by dynamic programming.
- pass
+
+ - Time complexity is still O(n), like factorial_memo().
+ - Space complexity is O(1), improving a lot.
+ """
+ fn = 1
+ for i in xrange(2, n + 1):
+ fn *= i
+ return fn
def main():
- pass
+ import time
+ n = 10
+
+ print('{}th number of factorial series:'.format(n))
+
+ start_time = time.time()
+ print('By recursion: {}'.format(factorial_recur(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By memorization: {}'.format(factorial_memo(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By dynamic programming: {}'.format(factorial_dp(n)))
+ print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido
|
## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of Fibonacci series, Fn, by recursion."""
pass
def factorial_memo(n):
"""Get the nth number of Fibonacci series, Fn, by memorization."""
pass
def factorial_dp(n):
"""Get the nth number of Fibonacci series by dynamic programming."""
pass
def main():
pass
if __name__ == '__main__':
main()
## Instruction:
Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of factorial series, Fn, by recursion.
- Time complexity: Fn - 1 = O(Fn); too fast.
- Space complexity: O(n).
"""
if n <= 1:
return 1
else:
return n * factorial_recur(n - 1)
def factorial_memo(n):
"""Get the nth number of factorial series, Fn, by memorization.
- Time complexity: O(n).
- Space complexity: O(n).
"""
fn_d = {}
fn_d[0] = 1
fn_d[1] = 1
for n in xrange(2, n + 1):
fn_d[n] = n * fn_d[n - 1]
return fn_d[n]
def factorial_dp(n):
"""Get the nth number of factorial series by dynamic programming.
- Time complexity is still O(n), like factorial_memo().
- Space complexity is O(1), improving a lot.
"""
fn = 1
for i in xrange(2, n + 1):
fn *= i
return fn
def main():
import time
n = 10
print('{}th number of factorial series:'.format(n))
start_time = time.time()
print('By recursion: {}'.format(factorial_recur(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By memorization: {}'.format(factorial_memo(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By dynamic programming: {}'.format(factorial_dp(n)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
- """Get the nth number of Fibonacci series, Fn, by recursion."""
? ^^^^^ ^ ---
+ """Get the nth number of factorial series, Fn, by recursion.
? ^ ^^^ ++
- pass
+
+ - Time complexity: Fn - 1 = O(Fn); too fast.
+ - Space complexity: O(n).
+ """
+ if n <= 1:
+ return 1
+ else:
+ return n * factorial_recur(n - 1)
def factorial_memo(n):
- """Get the nth number of Fibonacci series, Fn, by memorization."""
? ^^^^^ ^ ---
+ """Get the nth number of factorial series, Fn, by memorization.
? ^ ^^^ ++
- pass
-
+
+ - Time complexity: O(n).
+ - Space complexity: O(n).
+ """
+ fn_d = {}
+ fn_d[0] = 1
+ fn_d[1] = 1
+ for n in xrange(2, n + 1):
+ fn_d[n] = n * fn_d[n - 1]
+ return fn_d[n]
def factorial_dp(n):
- """Get the nth number of Fibonacci series by dynamic programming."""
? ^^^^^ ^ ---
+ """Get the nth number of factorial series by dynamic programming.
? ^ ^^^ ++
- pass
+
+ - Time complexity is still O(n), like factorial_memo().
+ - Space complexity is O(1), improving a lot.
+ """
+ fn = 1
+ for i in xrange(2, n + 1):
+ fn *= i
+ return fn
def main():
- pass
+ import time
+ n = 10
+
+ print('{}th number of factorial series:'.format(n))
+
+ start_time = time.time()
+ print('By recursion: {}'.format(factorial_recur(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By memorization: {}'.format(factorial_memo(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By dynamic programming: {}'.format(factorial_dp(n)))
+ print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
e16b2de7dd7c6e0df100bba08d3a7465bbbb4424
|
tests/test_service.py
|
tests/test_service.py
|
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import serialization, hashes
import requests
import base64
import unittest
import os
class TestPosieService(unittest.TestCase):
POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000')
key_url = "{}/key".format(POSIE_URL)
import_url = "{}/decrypt".format(POSIE_URL)
public_key = ""
def setUp(self):
# Load public der key from http endpoint
r = requests.get(self.key_url)
key_string = base64.b64decode(r.text)
self.public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
def send_message(self, message):
ciphertext = self.public_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
# Ask posie to decode message
r = requests.post(self.import_url, data=base64.b64encode(ciphertext))
return r
def test_decrypt_fail_sends_400(self):
# Ask posie to decode message
r = requests.post(self.import_url, data='rubbish')
self.assertEqual(r.status_code, 400)
def test_no_content_sends_400(self):
# Ask posie to decode message
r = requests.post(self.import_url, data='')
self.assertEqual(r.status_code, 400)
def test_decrypts_message(self):
# Encrypt a message with the key
message = b"Some encrypted message"
# Ask posie to decode message
r = self.send_message(message)
# Compare to bytestring version of decrypted data
self.assertEqual(str.encode(r.text), message)
|
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
import base64
import unittest
import sys
import os
sys.path.append(os.path.abspath('../server.py'))
import server
class TestPosieService(unittest.TestCase):
def test_key_generation(self):
# Load public der key from http endpoint
key_string = base64.b64decode(server.key())
public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
self.assertIsNotNone(public_key)
|
Remove requests and drop external tests (now in integration)
|
Remove requests and drop external tests (now in integration)
|
Python
|
mit
|
ONSdigital/edcdi
|
from cryptography.hazmat.backends import default_backend
- from cryptography.hazmat.primitives.asymmetric import padding
- from cryptography.hazmat.primitives import serialization, hashes
+ from cryptography.hazmat.primitives import serialization
- import requests
import base64
import unittest
+ import sys
import os
+
+ sys.path.append(os.path.abspath('../server.py'))
+
+ import server
class TestPosieService(unittest.TestCase):
+ def test_key_generation(self):
- POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000')
-
- key_url = "{}/key".format(POSIE_URL)
- import_url = "{}/decrypt".format(POSIE_URL)
- public_key = ""
-
- def setUp(self):
# Load public der key from http endpoint
- r = requests.get(self.key_url)
+ key_string = base64.b64decode(server.key())
- key_string = base64.b64decode(r.text)
-
- self.public_key = serialization.load_der_public_key(
+ public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
- def send_message(self, message):
+ self.assertIsNotNone(public_key)
- ciphertext = self.public_key.encrypt(
- message,
- padding.OAEP(
- mgf=padding.MGF1(algorithm=hashes.SHA1()),
- algorithm=hashes.SHA1(),
- label=None
- )
- )
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data=base64.b64encode(ciphertext))
-
- return r
-
- def test_decrypt_fail_sends_400(self):
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data='rubbish')
-
- self.assertEqual(r.status_code, 400)
-
- def test_no_content_sends_400(self):
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data='')
-
- self.assertEqual(r.status_code, 400)
-
- def test_decrypts_message(self):
- # Encrypt a message with the key
- message = b"Some encrypted message"
-
- # Ask posie to decode message
- r = self.send_message(message)
-
- # Compare to bytestring version of decrypted data
- self.assertEqual(str.encode(r.text), message)
-
|
Remove requests and drop external tests (now in integration)
|
## Code Before:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import serialization, hashes
import requests
import base64
import unittest
import os
class TestPosieService(unittest.TestCase):
POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000')
key_url = "{}/key".format(POSIE_URL)
import_url = "{}/decrypt".format(POSIE_URL)
public_key = ""
def setUp(self):
# Load public der key from http endpoint
r = requests.get(self.key_url)
key_string = base64.b64decode(r.text)
self.public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
def send_message(self, message):
ciphertext = self.public_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
# Ask posie to decode message
r = requests.post(self.import_url, data=base64.b64encode(ciphertext))
return r
def test_decrypt_fail_sends_400(self):
# Ask posie to decode message
r = requests.post(self.import_url, data='rubbish')
self.assertEqual(r.status_code, 400)
def test_no_content_sends_400(self):
# Ask posie to decode message
r = requests.post(self.import_url, data='')
self.assertEqual(r.status_code, 400)
def test_decrypts_message(self):
# Encrypt a message with the key
message = b"Some encrypted message"
# Ask posie to decode message
r = self.send_message(message)
# Compare to bytestring version of decrypted data
self.assertEqual(str.encode(r.text), message)
## Instruction:
Remove requests and drop external tests (now in integration)
## Code After:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
import base64
import unittest
import sys
import os
sys.path.append(os.path.abspath('../server.py'))
import server
class TestPosieService(unittest.TestCase):
def test_key_generation(self):
# Load public der key from http endpoint
key_string = base64.b64decode(server.key())
public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
self.assertIsNotNone(public_key)
|
from cryptography.hazmat.backends import default_backend
- from cryptography.hazmat.primitives.asymmetric import padding
- from cryptography.hazmat.primitives import serialization, hashes
? --------
+ from cryptography.hazmat.primitives import serialization
- import requests
import base64
import unittest
+ import sys
import os
+
+ sys.path.append(os.path.abspath('../server.py'))
+
+ import server
class TestPosieService(unittest.TestCase):
+ def test_key_generation(self):
- POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000')
-
- key_url = "{}/key".format(POSIE_URL)
- import_url = "{}/decrypt".format(POSIE_URL)
- public_key = ""
-
- def setUp(self):
# Load public der key from http endpoint
- r = requests.get(self.key_url)
+ key_string = base64.b64decode(server.key())
- key_string = base64.b64decode(r.text)
-
- self.public_key = serialization.load_der_public_key(
? -----
+ public_key = serialization.load_der_public_key(
key_string,
backend=default_backend()
)
+ self.assertIsNotNone(public_key)
- def send_message(self, message):
-
- ciphertext = self.public_key.encrypt(
- message,
- padding.OAEP(
- mgf=padding.MGF1(algorithm=hashes.SHA1()),
- algorithm=hashes.SHA1(),
- label=None
- )
- )
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data=base64.b64encode(ciphertext))
-
- return r
-
- def test_decrypt_fail_sends_400(self):
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data='rubbish')
-
- self.assertEqual(r.status_code, 400)
-
- def test_no_content_sends_400(self):
-
- # Ask posie to decode message
- r = requests.post(self.import_url, data='')
-
- self.assertEqual(r.status_code, 400)
-
- def test_decrypts_message(self):
- # Encrypt a message with the key
- message = b"Some encrypted message"
-
- # Ask posie to decode message
- r = self.send_message(message)
-
- # Compare to bytestring version of decrypted data
- self.assertEqual(str.encode(r.text), message)
|
3cc8f2f212199d956c0132cc0aa12bd33e94e8dc
|
tests/drivers/test-facets.py
|
tests/drivers/test-facets.py
|
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v, type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
|
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v.typeDefinition(), type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
|
Change to support new binding model
|
Change to support new binding model
|
Python
|
apache-2.0
|
jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,balanced/PyXB,jonfoster/pyxb1,jonfoster/pyxb-upstream-mirror,balanced/PyXB,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,pabigot/pyxb,jonfoster/pyxb2,balanced/PyXB,CantemoInternal/pyxb,pabigot/pyxb,jonfoster/pyxb1,CantemoInternal/pyxb,jonfoster/pyxb2
|
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
- self.assertEqual(v, type(instance))
+ self.assertEqual(v.typeDefinition(), type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
|
Change to support new binding model
|
## Code Before:
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v, type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
## Instruction:
Change to support new binding model
## Code After:
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v.typeDefinition(), type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
|
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
- self.assertEqual(v, type(instance))
+ self.assertEqual(v.typeDefinition(), type(instance))
? +++++++++++++++++
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
|
543c7307f26553d78bf3f18b5f93a2bc23cfb875
|
reports/admin.py
|
reports/admin.py
|
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at']
search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
|
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at', 'content']
search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
|
Fix some issues, because of models change
|
Fix some issues, because of models change
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
- list_filter = ['created_at']
+ list_filter = ['created_at', 'content']
- search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
+ search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
|
Fix some issues, because of models change
|
## Code Before:
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at']
search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
## Instruction:
Fix some issues, because of models change
## Code After:
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
list_filter = ['created_at', 'content']
search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
admin.site.register(Report, ReportAdmin)
|
from django.contrib import admin
from .models import Report
class ReportAdmin(admin.ModelAdmin):
list_display = ('addressed_to', 'reported_from', 'content', 'signed_from', 'get_copies', 'created_at')
- list_filter = ['created_at']
+ list_filter = ['created_at', 'content']
? +++++++++++
- search_fields = ['addressed_to', 'reported_from', 'content', 'signed_from']
+ search_fields = ['addressed_to', 'reported_from__username', 'content', 'signed_from']
? ++++++++++
admin.site.register(Report, ReportAdmin)
|
1821577ca19bb05847c37d856896d8e1ce8b3acb
|
plugins/religion.py
|
plugins/religion.py
|
from util import hook, http
@hook.command('god')
@hook.command
def bible(inp):
".bible <passage> -- gets <passage> from the Bible (ESV)"
base_url = ('http://www.esvapi.org/v2/rest/passageQuery?key=IP&'
'output-format=plain-text&include-heading-horizontal-lines&'
'include-headings=false&include-passage-horizontal-lines=false&'
'include-passage-references=false&include-short-copyright=false&'
'include-footnotes=false&line-length=0&'
'include-heading-horizontal-lines=false')
text = http.get(base_url, passage=inp)
text = ' '.join(text.split())
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
|
from util import hook, http
# https://api.esv.org/account/create-application/
@hook.api_key('bible')
@hook.command('god')
@hook.command
def bible(inp, api_key=None):
".bible <passage> -- gets <passage> from the Bible (ESV)"
base_url = ('https://api.esv.org/v3/passage/text/?'
'include-headings=false&'
'include-passage-horizontal-lines=false&'
'include-heading-horizontal-lines=false&'
'include-passage-references=false&'
'include-short-copyright=false&'
'include-footnotes=false&'
)
text = http.get_json(base_url, q=inp,
headers={'Authorization': 'Token ' + api_key})
text = ' '.join(text['passages']).strip()
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
|
Fix .bible: v2 was deprecated, the v3 API requires a key.
|
Fix .bible: v2 was deprecated, the v3 API requires a key.
|
Python
|
unlicense
|
parkrrr/skybot,TeamPeggle/ppp-helpdesk,crisisking/skybot,jmgao/skybot,rmmh/skybot
|
from util import hook, http
-
+ # https://api.esv.org/account/create-application/
+ @hook.api_key('bible')
@hook.command('god')
@hook.command
- def bible(inp):
+ def bible(inp, api_key=None):
".bible <passage> -- gets <passage> from the Bible (ESV)"
- base_url = ('http://www.esvapi.org/v2/rest/passageQuery?key=IP&'
- 'output-format=plain-text&include-heading-horizontal-lines&'
+ base_url = ('https://api.esv.org/v3/passage/text/?'
+ 'include-headings=false&'
- 'include-headings=false&include-passage-horizontal-lines=false&'
+ 'include-passage-horizontal-lines=false&'
- 'include-passage-references=false&include-short-copyright=false&'
- 'include-footnotes=false&line-length=0&'
- 'include-heading-horizontal-lines=false')
+ 'include-heading-horizontal-lines=false&'
+ 'include-passage-references=false&'
+ 'include-short-copyright=false&'
+ 'include-footnotes=false&'
+ )
- text = http.get(base_url, passage=inp)
+ text = http.get_json(base_url, q=inp,
+ headers={'Authorization': 'Token ' + api_key})
- text = ' '.join(text.split())
+ text = ' '.join(text['passages']).strip()
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
|
Fix .bible: v2 was deprecated, the v3 API requires a key.
|
## Code Before:
from util import hook, http
@hook.command('god')
@hook.command
def bible(inp):
".bible <passage> -- gets <passage> from the Bible (ESV)"
base_url = ('http://www.esvapi.org/v2/rest/passageQuery?key=IP&'
'output-format=plain-text&include-heading-horizontal-lines&'
'include-headings=false&include-passage-horizontal-lines=false&'
'include-passage-references=false&include-short-copyright=false&'
'include-footnotes=false&line-length=0&'
'include-heading-horizontal-lines=false')
text = http.get(base_url, passage=inp)
text = ' '.join(text.split())
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
## Instruction:
Fix .bible: v2 was deprecated, the v3 API requires a key.
## Code After:
from util import hook, http
# https://api.esv.org/account/create-application/
@hook.api_key('bible')
@hook.command('god')
@hook.command
def bible(inp, api_key=None):
".bible <passage> -- gets <passage> from the Bible (ESV)"
base_url = ('https://api.esv.org/v3/passage/text/?'
'include-headings=false&'
'include-passage-horizontal-lines=false&'
'include-heading-horizontal-lines=false&'
'include-passage-references=false&'
'include-short-copyright=false&'
'include-footnotes=false&'
)
text = http.get_json(base_url, q=inp,
headers={'Authorization': 'Token ' + api_key})
text = ' '.join(text['passages']).strip()
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
|
from util import hook, http
-
+ # https://api.esv.org/account/create-application/
+ @hook.api_key('bible')
@hook.command('god')
@hook.command
- def bible(inp):
+ def bible(inp, api_key=None):
".bible <passage> -- gets <passage> from the Bible (ESV)"
- base_url = ('http://www.esvapi.org/v2/rest/passageQuery?key=IP&'
- 'output-format=plain-text&include-heading-horizontal-lines&'
+ base_url = ('https://api.esv.org/v3/passage/text/?'
+ 'include-headings=false&'
- 'include-headings=false&include-passage-horizontal-lines=false&'
? -----------------------
+ 'include-passage-horizontal-lines=false&'
- 'include-passage-references=false&include-short-copyright=false&'
- 'include-footnotes=false&line-length=0&'
- 'include-heading-horizontal-lines=false')
? -
+ 'include-heading-horizontal-lines=false&'
? +
+ 'include-passage-references=false&'
+ 'include-short-copyright=false&'
+ 'include-footnotes=false&'
+ )
- text = http.get(base_url, passage=inp)
? ^^^^^^^ ^
+ text = http.get_json(base_url, q=inp,
? +++++ ^ ^
+ headers={'Authorization': 'Token ' + api_key})
- text = ' '.join(text.split())
+ text = ' '.join(text['passages']).strip()
if len(text) > 400:
text = text[:text.rfind(' ', 0, 400)] + '...'
return text
@hook.command('allah')
@hook.command
def koran(inp): # Koran look-up plugin by Ghetto Wizard
".koran <chapter.verse> -- gets <chapter.verse> from the Koran"
url = 'http://quod.lib.umich.edu/cgi/k/koran/koran-idx?type=simple'
results = http.get_html(url, q1=inp).xpath('//li')
if not results:
return 'No results for ' + inp
return results[0].text_content()
|
acfa9a206e803ad41565f6f3a678d9e1130948d8
|
test/runmain.py
|
test/runmain.py
|
import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
from test import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
Update run mainfile to reflect import paths
|
Update run mainfile to reflect import paths
|
Python
|
mit
|
bibsian/database-development
|
- import ui_logic_mainwindow as uv
+ from test import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
Update run mainfile to reflect import paths
|
## Code Before:
import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
## Instruction:
Update run mainfile to reflect import paths
## Code After:
from test import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
- import ui_logic_mainwindow as uv
+ from test import ui_logic_mainwindow as uv
? ++++++++++
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
49f3c5bf5b95a7d678e541d93e0999f37f8a2b26
|
students/admin.py
|
students/admin.py
|
from django.contrib import admin
from .models import WhitelistedUsername
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
|
from django.contrib import admin
from .models import WhitelistedUsername
@admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
|
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
|
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
|
Python
|
mit
|
muhummadPatel/raspied,muhummadPatel/raspied,muhummadPatel/raspied
|
from django.contrib import admin
from .models import WhitelistedUsername
+ @admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
-
- admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
-
|
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
|
## Code Before:
from django.contrib import admin
from .models import WhitelistedUsername
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
## Instruction:
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
## Code After:
from django.contrib import admin
from .models import WhitelistedUsername
@admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
|
from django.contrib import admin
from .models import WhitelistedUsername
+ @admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
-
-
- admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
|
fc7ad7d55622aa9edb77b9f7822260110a772805
|
db.py
|
db.py
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Bring DB script into line with other prod scripts
|
Bring DB script into line with other prod scripts
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
+ default_env_file = '/home/ubuntu/environment'
+ environment = 'live'
+
+ if os.path.isfile(default_env_file):
+ with open(default_env_file, 'r') as environment_file:
+ environment = environment_file.readline().strip()
+
+ # on aws get secrets and export to env
- secrets = getAllSecrets(region="eu-west-1")
+ os.environ.update(getAllSecrets(region="eu-west-1"))
- for key, val in secrets.items():
- os.environ[key] = val
+
+ from config import configs
+
+ os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Bring DB script into line with other prod scripts
|
## Code Before:
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
## Instruction:
Bring DB script into line with other prod scripts
## Code After:
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
+ default_env_file = '/home/ubuntu/environment'
+ environment = 'live'
+
+ if os.path.isfile(default_env_file):
+ with open(default_env_file, 'r') as environment_file:
+ environment = environment_file.readline().strip()
+
+ # on aws get secrets and export to env
- secrets = getAllSecrets(region="eu-west-1")
? ^ ^^^^^
+ os.environ.update(getAllSecrets(region="eu-west-1"))
? + + ^^^ ++++++++ ^ +
- for key, val in secrets.items():
- os.environ[key] = val
+
+ from config import configs
+
+ os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
e170a96859232d1436930be7a0cbfc7f2295c8a7
|
main.py
|
main.py
|
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
self.numberRequests += 1
request.setHeader("content-type", "text/plain")
return "I am request #" + str(self.numberRequests) + "\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
|
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
request.setHeader("content-type", "text/plain")
return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
|
Set root resource to welcome
|
Set root resource to welcome
|
Python
|
mit
|
derwolfe/teiler,derwolfe/teiler
|
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
- self.numberRequests += 1
request.setHeader("content-type", "text/plain")
- return "I am request #" + str(self.numberRequests) + "\n"
+ return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
|
Set root resource to welcome
|
## Code Before:
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
self.numberRequests += 1
request.setHeader("content-type", "text/plain")
return "I am request #" + str(self.numberRequests) + "\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
## Instruction:
Set root resource to welcome
## Code After:
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
request.setHeader("content-type", "text/plain")
return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
|
from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
- self.numberRequests += 1
request.setHeader("content-type", "text/plain")
- return "I am request #" + str(self.numberRequests) + "\n"
+ return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
|
364fde2dd6554760ca63c5b16e35222d5482999e
|
report/report_util.py
|
report/report_util.py
|
def compare_ledger_types(account, data, orm):
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
|
def compare_ledger_types(account, data, orm):
if not hasattr(account, 'ledger_types'):
# Ignore this filter when alternate_ledger is not installed.
return True
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
|
Fix errors when alternate_ledger is not installed
|
Fix errors when alternate_ledger is not installed
|
Python
|
agpl-3.0
|
lithint/account_report_webkit,xcgd/account_report_webkit,xcgd/account_report_webkit,lithint/account_report_webkit
|
def compare_ledger_types(account, data, orm):
+ if not hasattr(account, 'ledger_types'):
+ # Ignore this filter when alternate_ledger is not installed.
+ return True
+
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
|
Fix errors when alternate_ledger is not installed
|
## Code Before:
def compare_ledger_types(account, data, orm):
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
## Instruction:
Fix errors when alternate_ledger is not installed
## Code After:
def compare_ledger_types(account, data, orm):
if not hasattr(account, 'ledger_types'):
# Ignore this filter when alternate_ledger is not installed.
return True
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
|
def compare_ledger_types(account, data, orm):
+ if not hasattr(account, 'ledger_types'):
+ # Ignore this filter when alternate_ledger is not installed.
+ return True
+
selected_ledger = data['form']['ledger_type']
account_ledgers = [ledger.id for ledger in account.ledger_types]
if not selected_ledger:
return account_ledgers == []
return selected_ledger in account_ledgers
def should_show_account(account, data):
if 'account_from' not in data['form'] or 'account_to' not in data['form']:
return True
low = data['form']['account_from']
high = data['form']['account_to']
return low <= account.code <= high
|
84e9532487615f684abbed17d6821ae7bc84c9be
|
virtualfish/loader/__init__.py
|
virtualfish/loader/__init__.py
|
from __future__ import print_function
import os
import sys
import pkg_resources
def load(plugins=()):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
commands += [
"set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
"source {}".format(os.path.join(base_path, "virtual.fish")),
]
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
commands.append("emit virtualfish_did_setup_plugins")
return commands
|
from __future__ import print_function
import os
import sys
import pkg_resources
def load(plugins=(), full_install=True):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if full_install:
commands += [
"set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
"source {}".format(os.path.join(base_path, "virtual.fish")),
]
else:
commands = []
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
if full_install:
commands.append("emit virtualfish_did_setup_plugins")
return commands
|
Add kwarg to load function to distinguish from full install
|
Add kwarg to load function to distinguish from full install
The load function is used for a full install and thus always adds
general configuration lines to the loader file, but we don't want that
for plugin installation.
|
Python
|
mit
|
adambrenecki/virtualfish,adambrenecki/virtualfish
|
from __future__ import print_function
import os
import sys
import pkg_resources
- def load(plugins=()):
+ def load(plugins=(), full_install=True):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
+ base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ if full_install:
- commands += [
+ commands += [
- "set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
+ "set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
- "source {}".format(os.path.join(base_path, "virtual.fish")),
+ "source {}".format(os.path.join(base_path, "virtual.fish")),
- ]
+ ]
+ else:
+ commands = []
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
+ if full_install:
- commands.append("emit virtualfish_did_setup_plugins")
+ commands.append("emit virtualfish_did_setup_plugins")
+
return commands
|
Add kwarg to load function to distinguish from full install
|
## Code Before:
from __future__ import print_function
import os
import sys
import pkg_resources
def load(plugins=()):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
commands += [
"set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
"source {}".format(os.path.join(base_path, "virtual.fish")),
]
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
commands.append("emit virtualfish_did_setup_plugins")
return commands
## Instruction:
Add kwarg to load function to distinguish from full install
## Code After:
from __future__ import print_function
import os
import sys
import pkg_resources
def load(plugins=(), full_install=True):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if full_install:
commands += [
"set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
"source {}".format(os.path.join(base_path, "virtual.fish")),
]
else:
commands = []
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
if full_install:
commands.append("emit virtualfish_did_setup_plugins")
return commands
|
from __future__ import print_function
import os
import sys
import pkg_resources
- def load(plugins=()):
+ def load(plugins=(), full_install=True):
try:
version = pkg_resources.get_distribution("virtualfish").version
commands = ["set -g VIRTUALFISH_VERSION {}".format(version)]
except pkg_resources.DistributionNotFound:
commands = []
+ base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ if full_install:
- commands += [
+ commands += [
? ++++
- "set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
+ "set -g VIRTUALFISH_PYTHON_EXEC {}".format(sys.executable),
? ++++
- "source {}".format(os.path.join(base_path, "virtual.fish")),
+ "source {}".format(os.path.join(base_path, "virtual.fish")),
? ++++
- ]
+ ]
+ else:
+ commands = []
for plugin in plugins:
path = os.path.join(base_path, plugin + ".fish")
if os.path.exists(path):
commands.append("source {}".format(path))
else:
raise ValueError("Plugin does not exist: " + plugin)
+ if full_install:
- commands.append("emit virtualfish_did_setup_plugins")
+ commands.append("emit virtualfish_did_setup_plugins")
? ++++
+
return commands
|
dff2120a65daacfb1add8da604483f354abcefa2
|
src/pygrapes/serializer/__init__.py
|
src/pygrapes/serializer/__init__.py
|
from abstract import Abstract
from json import Json
from msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
|
from pygrapes.serializer.abstract import Abstract
from pygrapes.serializer.json import Json
from pygrapes.serializer.msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
|
Load resources by absolute path not relative
|
Load resources by absolute path not relative
|
Python
|
bsd-3-clause
|
michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes
|
- from abstract import Abstract
- from json import Json
- from msgpack import MsgPack
+ from pygrapes.serializer.abstract import Abstract
+ from pygrapes.serializer.json import Json
+ from pygrapes.serializer.msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
|
Load resources by absolute path not relative
|
## Code Before:
from abstract import Abstract
from json import Json
from msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
## Instruction:
Load resources by absolute path not relative
## Code After:
from pygrapes.serializer.abstract import Abstract
from pygrapes.serializer.json import Json
from pygrapes.serializer.msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
|
- from abstract import Abstract
- from json import Json
- from msgpack import MsgPack
+ from pygrapes.serializer.abstract import Abstract
+ from pygrapes.serializer.json import Json
+ from pygrapes.serializer.msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
|
40095b001ab95fda4cc80bcc807508e9580ebf2d
|
fireplace/cards/gvg/neutral_legendary.py
|
fireplace/cards/gvg/neutral_legendary.py
|
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
|
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
# Toshley
class GVG_115:
action = [GiveSparePart(CONTROLLER)]
deathrattle = [GiveSparePart(CONTROLLER)]
# Mekgineer Thermaplugg
class GVG_116:
def MINION_DESTROY(self, minion):
if minion.controller is not self.controller:
return [Summon(CONTROLLER, "EX1_029")]
# Gazlowe
class GVG_117:
def OWN_CARD_PLAYED(self, card):
if card.type == CardType.SPELL and card.cost == 1:
return [Give(CONTROLLER, randomCollectible(race=Race.MECHANICAL))]
|
Implement Toshley, Mekgineer Thermaplugg and Gazlowe
|
Implement Toshley, Mekgineer Thermaplugg and Gazlowe
|
Python
|
agpl-3.0
|
amw2104/fireplace,oftc-ftw/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,liujimj/fireplace,oftc-ftw/fireplace,amw2104/fireplace,butozerca/fireplace,Ragowit/fireplace,liujimj/fireplace,jleclanche/fireplace,butozerca/fireplace,Ragowit/fireplace,NightKev/fireplace,beheh/fireplace,Meerkov/fireplace,Meerkov/fireplace
|
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
+
+ # Toshley
+ class GVG_115:
+ action = [GiveSparePart(CONTROLLER)]
+ deathrattle = [GiveSparePart(CONTROLLER)]
+
+
+ # Mekgineer Thermaplugg
+ class GVG_116:
+ def MINION_DESTROY(self, minion):
+ if minion.controller is not self.controller:
+ return [Summon(CONTROLLER, "EX1_029")]
+
+
+ # Gazlowe
+ class GVG_117:
+ def OWN_CARD_PLAYED(self, card):
+ if card.type == CardType.SPELL and card.cost == 1:
+ return [Give(CONTROLLER, randomCollectible(race=Race.MECHANICAL))]
+
|
Implement Toshley, Mekgineer Thermaplugg and Gazlowe
|
## Code Before:
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
## Instruction:
Implement Toshley, Mekgineer Thermaplugg and Gazlowe
## Code After:
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
# Toshley
class GVG_115:
action = [GiveSparePart(CONTROLLER)]
deathrattle = [GiveSparePart(CONTROLLER)]
# Mekgineer Thermaplugg
class GVG_116:
def MINION_DESTROY(self, minion):
if minion.controller is not self.controller:
return [Summon(CONTROLLER, "EX1_029")]
# Gazlowe
class GVG_117:
def OWN_CARD_PLAYED(self, card):
if card.type == CardType.SPELL and card.cost == 1:
return [Give(CONTROLLER, randomCollectible(race=Race.MECHANICAL))]
|
from ..utils import *
##
# Minions
# Dr. Boom
class GVG_110:
action = [Summon(CONTROLLER, "GVG_110t") * 2]
# Boom Bot
class GVG_110t:
def deathrattle(self):
return [Hit(RANDOM_ENEMY_CHARACTER, random.randint(1, 4))]
# Sneed's Old Shredder
class GVG_114:
def deathrattle(self):
legendary = randomCollectible(type=CardType.MINION, rarity=Rarity.LEGENDARY)
return [Summon(CONTROLLER, legendary)]
+
+
+ # Toshley
+ class GVG_115:
+ action = [GiveSparePart(CONTROLLER)]
+ deathrattle = [GiveSparePart(CONTROLLER)]
+
+
+ # Mekgineer Thermaplugg
+ class GVG_116:
+ def MINION_DESTROY(self, minion):
+ if minion.controller is not self.controller:
+ return [Summon(CONTROLLER, "EX1_029")]
+
+
+ # Gazlowe
+ class GVG_117:
+ def OWN_CARD_PLAYED(self, card):
+ if card.type == CardType.SPELL and card.cost == 1:
+ return [Give(CONTROLLER, randomCollectible(race=Race.MECHANICAL))]
|
7f42966277eff0d16fd15d5192cffcf7a91aae2e
|
expyfun/__init__.py
|
expyfun/__init__.py
|
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
# initialize logging
set_log_level(None, False)
|
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
from . import analyze # fast enough, include here
# initialize logging
set_log_level(None, False)
|
Add `analyze` to `expyfun` init
|
FIX: Add `analyze` to `expyfun` init
|
Python
|
bsd-3-clause
|
LABSN/expyfun,rkmaddox/expyfun,Eric89GXL/expyfun,lkishline/expyfun,drammock/expyfun
|
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
+ from . import analyze # fast enough, include here
# initialize logging
set_log_level(None, False)
|
Add `analyze` to `expyfun` init
|
## Code Before:
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
# initialize logging
set_log_level(None, False)
## Instruction:
Add `analyze` to `expyfun` init
## Code After:
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
from . import analyze # fast enough, include here
# initialize logging
set_log_level(None, False)
|
__version__ = '1.1.0.git'
# have to import verbose first since it's needed by many things
from ._utils import set_log_level, set_config, \
get_config, get_config_path
from ._utils import verbose_dec as verbose
from ._experiment_controller import ExperimentController, wait_secs
from ._eyelink_controller import EyelinkController
from ._create_system_config import create_system_config
+ from . import analyze # fast enough, include here
# initialize logging
set_log_level(None, False)
|
ce05a68965a252a1756d6eac64bf319ef17ed158
|
packages/python/plotly/plotly/io/_utils.py
|
packages/python/plotly/plotly/io/_utils.py
|
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
|
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
elif hasattr(fig, "to_plotly_json"):
fig_dict = fig.to_plotly_json()
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
|
Handle Dash objects in to_json
|
Handle Dash objects in to_json
|
Python
|
mit
|
plotly/plotly.py,plotly/plotly.py,plotly/plotly.py
|
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
+ elif hasattr(fig, "to_plotly_json"):
+ fig_dict = fig.to_plotly_json()
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
|
Handle Dash objects in to_json
|
## Code Before:
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
## Instruction:
Handle Dash objects in to_json
## Code After:
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
elif hasattr(fig, "to_plotly_json"):
fig_dict = fig.to_plotly_json()
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
|
from __future__ import absolute_import
import plotly
import plotly.graph_objs as go
def validate_coerce_fig_to_dict(fig, validate, clone=True):
from plotly.basedatatypes import BaseFigure
if isinstance(fig, BaseFigure):
fig_dict = fig.to_dict(clone=clone)
elif isinstance(fig, dict):
if validate:
# This will raise an exception if fig is not a valid plotly figure
fig_dict = plotly.graph_objs.Figure(fig).to_plotly_json()
else:
fig_dict = fig
+ elif hasattr(fig, "to_plotly_json"):
+ fig_dict = fig.to_plotly_json()
else:
raise ValueError(
"""
The fig parameter must be a dict or Figure.
Received value of type {typ}: {v}""".format(
typ=type(fig), v=fig
)
)
return fig_dict
def validate_coerce_output_type(output_type):
if output_type == "Figure" or output_type == go.Figure:
cls = go.Figure
elif output_type == "FigureWidget" or (
hasattr(go, "FigureWidget") and output_type == go.FigureWidget
):
cls = go.FigureWidget
else:
raise ValueError(
"""
Invalid output type: {output_type}
Must be one of: 'Figure', 'FigureWidget'"""
)
return cls
|
0948e7a25e79b01dae3c5b6cf9b0c272e2d196b7
|
moviepy/video/fx/scroll.py
|
moviepy/video/fx/scroll.py
|
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
x = max(0, min(xmax, x_start+ np.round(x_speed*t)))
y = max(0, min(ymax, y_start+ np.round(y_speed*t)))
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
|
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
x = int(max(0, min(xmax, x_start+ np.round(x_speed*t))))
y = int(max(0, min(ymax, y_start+ np.round(y_speed*t))))
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
|
Add int() wrapper to prevent floats
|
Add int() wrapper to prevent floats
|
Python
|
mit
|
Zulko/moviepy,ssteo/moviepy,kerstin/moviepy
|
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
- x = max(0, min(xmax, x_start+ np.round(x_speed*t)))
+ x = int(max(0, min(xmax, x_start+ np.round(x_speed*t))))
- y = max(0, min(ymax, y_start+ np.round(y_speed*t)))
+ y = int(max(0, min(ymax, y_start+ np.round(y_speed*t))))
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
|
Add int() wrapper to prevent floats
|
## Code Before:
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
x = max(0, min(xmax, x_start+ np.round(x_speed*t)))
y = max(0, min(ymax, y_start+ np.round(y_speed*t)))
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
## Instruction:
Add int() wrapper to prevent floats
## Code After:
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
x = int(max(0, min(xmax, x_start+ np.round(x_speed*t))))
y = int(max(0, min(ymax, y_start+ np.round(y_speed*t))))
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
|
import numpy as np
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
x_start=0, y_start=0, apply_to="mask"):
""" Scrolls horizontally or vertically a clip, e.g. to make end
credits """
if h is None: h = clip.h
if w is None: w = clip.w
xmax = clip.w-w-1
ymax = clip.h-h-1
def f(gf,t):
- x = max(0, min(xmax, x_start+ np.round(x_speed*t)))
+ x = int(max(0, min(xmax, x_start+ np.round(x_speed*t))))
? ++++ +
- y = max(0, min(ymax, y_start+ np.round(y_speed*t)))
+ y = int(max(0, min(ymax, y_start+ np.round(y_speed*t))))
? ++++ +
return gf(t)[y:y+h, x:x+w]
return clip.fl(f, apply_to = apply_to)
|
d3078cafd4e64e9c093d9d823df2035b8380d643
|
meta-refkit-computervision/recipes-computervision/caffe-bvlc-reference/files/dnn-test.py
|
meta-refkit-computervision/recipes-computervision/caffe-bvlc-reference/files/dnn-test.py
|
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setBlob(".data", data)
# run the network
net.forward()
# print the class
print(str(net.getBlob("prob").argmax()))
|
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setInput(data, "data")
# run the network
result = net.forward("prob")
# print the class
print(str(result.argmax()))
|
Fix DNN test to be compatible with OpenCV 3.3.
|
convnet: Fix DNN test to be compatible with OpenCV 3.3.
OpenCV DNN module API changed with OpenCV 3.3. Fix the tests to use the
new API.
Signed-off-by: Ismo Puustinen <[email protected]>
|
Python
|
mit
|
intel/intel-iot-refkit,mythi/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,mythi/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,mythi/intel-iot-refkit,mythi/intel-iot-refkit,klihub/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit
|
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
- net.setBlob(".data", data)
+ net.setInput(data, "data")
# run the network
- net.forward()
+ result = net.forward("prob")
# print the class
- print(str(net.getBlob("prob").argmax()))
+ print(str(result.argmax()))
|
Fix DNN test to be compatible with OpenCV 3.3.
|
## Code Before:
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setBlob(".data", data)
# run the network
net.forward()
# print the class
print(str(net.getBlob("prob").argmax()))
## Instruction:
Fix DNN test to be compatible with OpenCV 3.3.
## Code After:
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setInput(data, "data")
# run the network
result = net.forward("prob")
# print the class
print(str(result.argmax()))
|
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
- net.setBlob(".data", data)
+ net.setInput(data, "data")
# run the network
- net.forward()
+ result = net.forward("prob")
# print the class
- print(str(net.getBlob("prob").argmax()))
+ print(str(result.argmax()))
|
c684dbb999ac622d5bba266d39e2dd7e69265393
|
yunity/api/utils.py
|
yunity/api/utils.py
|
from django.http import JsonResponse
class ApiBase(object):
STATUS_ERROR = 0
STATUS_SUCCESS = 1
STATUS_WARNING = 2
def json_response(self, data=None, status=STATUS_SUCCESS, message=None):
status_code = 400 if status == ApiBase.STATUS_ERROR else 200
return JsonResponse({
"data": data,
"status": status,
"message": message
}, status=status_code)
|
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
|
Refactor json_response to more BDD methods
|
Refactor json_response to more BDD methods
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
|
from django.http import JsonResponse
class ApiBase(object):
+ @classmethod
+ def success(cls, data, status=200):
+ """
+ :type data: dict
+ :type status: int
+ :rtype JsonResponse
+ """
+ return JsonResponse(data, status=status)
- STATUS_ERROR = 0
- STATUS_SUCCESS = 1
- STATUS_WARNING = 2
- def json_response(self, data=None, status=STATUS_SUCCESS, message=None):
+ @classmethod
+ def error(cls, error, status=400):
+ """
+ :type error: str
+ :type status: int
+ :rtype JsonResponse
- status_code = 400 if status == ApiBase.STATUS_ERROR else 200
+ """
+ return JsonResponse({'error': error}, status=status)
- return JsonResponse({
- "data": data,
- "status": status,
- "message": message
- }, status=status_code)
-
|
Refactor json_response to more BDD methods
|
## Code Before:
from django.http import JsonResponse
class ApiBase(object):
STATUS_ERROR = 0
STATUS_SUCCESS = 1
STATUS_WARNING = 2
def json_response(self, data=None, status=STATUS_SUCCESS, message=None):
status_code = 400 if status == ApiBase.STATUS_ERROR else 200
return JsonResponse({
"data": data,
"status": status,
"message": message
}, status=status_code)
## Instruction:
Refactor json_response to more BDD methods
## Code After:
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
|
from django.http import JsonResponse
class ApiBase(object):
+ @classmethod
+ def success(cls, data, status=200):
+ """
+ :type data: dict
+ :type status: int
+ :rtype JsonResponse
+ """
+ return JsonResponse(data, status=status)
- STATUS_ERROR = 0
- STATUS_SUCCESS = 1
- STATUS_WARNING = 2
- def json_response(self, data=None, status=STATUS_SUCCESS, message=None):
+ @classmethod
+ def error(cls, error, status=400):
+ """
+ :type error: str
+ :type status: int
+ :rtype JsonResponse
+ """
+ return JsonResponse({'error': error}, status=status)
- status_code = 400 if status == ApiBase.STATUS_ERROR else 200
-
- return JsonResponse({
- "data": data,
- "status": status,
- "message": message
- }, status=status_code)
|
5329c48a6f0a36809d3088560f91b427f7a2bf0b
|
models.py
|
models.py
|
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
def __init__(self, serialized_string):
self.created_at = datetime.utcnow()
self.serialized_string = serialized_string
def __repr__(self):
return self.title
|
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
def __init__(self, title, serialized_string):
self.created_at = datetime.utcnow()
self.title = title
self.serialized_string = serialized_string
def __repr__(self):
return self.title
|
Add title to Graph object constructor
|
Add title to Graph object constructor
|
Python
|
mit
|
ChristopherChudzicki/math3d,stardust66/math3d,stardust66/math3d,stardust66/math3d,stardust66/math3d,ChristopherChudzicki/math3d,ChristopherChudzicki/math3d,ChristopherChudzicki/math3d
|
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
- def __init__(self, serialized_string):
+ def __init__(self, title, serialized_string):
self.created_at = datetime.utcnow()
+ self.title = title
self.serialized_string = serialized_string
def __repr__(self):
return self.title
|
Add title to Graph object constructor
|
## Code Before:
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
def __init__(self, serialized_string):
self.created_at = datetime.utcnow()
self.serialized_string = serialized_string
def __repr__(self):
return self.title
## Instruction:
Add title to Graph object constructor
## Code After:
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
def __init__(self, title, serialized_string):
self.created_at = datetime.utcnow()
self.title = title
self.serialized_string = serialized_string
def __repr__(self):
return self.title
|
from datetime import datetime
from app import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String())
pw_hash = db.Column(db.String())
graphs = db.relationship("Graph", backref="user", lazy="dynamic")
def __init__(self, username, email, name, password):
self.username = username
self.email = email
self.name = name
self.pw_hash = bcrypt.generate_password_hash(password).decode("utf-8")
def __repr__(self):
return self.username
def check_password(self, password):
return bcrypt.check_password_hash(self.pw_hash, password)
class Graph(db.Model):
__tablename__ = "graphs"
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime)
title = db.Column(db.String())
serialized_string = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
- def __init__(self, serialized_string):
+ def __init__(self, title, serialized_string):
? +++++++
self.created_at = datetime.utcnow()
+ self.title = title
self.serialized_string = serialized_string
def __repr__(self):
return self.title
|
ac3c855583a023fc76b8720aa7e38419b28a26d4
|
falcom/api/hathi.py
|
falcom/api/hathi.py
|
import json
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
return ()
|
import json
class HathiItems:
def __init__ (self):
pass
def __len__ (self):
return 0
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
return HathiItems()
|
Refactor empty tuple into empty object with len()
|
Refactor empty tuple into empty object with len()
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
import json
+
+ class HathiItems:
+
+ def __init__ (self):
+ pass
+
+ def __len__ (self):
+ return 0
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
- return ()
+ return HathiItems()
|
Refactor empty tuple into empty object with len()
|
## Code Before:
import json
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
return ()
## Instruction:
Refactor empty tuple into empty object with len()
## Code After:
import json
class HathiItems:
def __init__ (self):
pass
def __len__ (self):
return 0
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
return HathiItems()
|
import json
+
+ class HathiItems:
+
+ def __init__ (self):
+ pass
+
+ def __len__ (self):
+ return 0
def get_counts_from_item_list (items, htid):
a = len([x for x in items if x["htid"] == htid])
b = len(items) - a
return a, b
def get_oclc_counts_from_json (json_data, htid = ""):
try:
data = json.loads(json_data)
return get_counts_from_item_list(data["items"], htid)
except:
return 0, 0
def get_hathi_data_from_json ():
- return ()
+ return HathiItems()
|
df385ac3c06018a2d151ead1e07293166ff92614
|
erpnext/patches/v11_0/move_leave_approvers_from_employee.py
|
erpnext/patches/v11_0/move_leave_approvers_from_employee.py
|
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
|
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
|
Check if table exists else return
|
Check if table exists else return
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
+
+ if not frappe.db.table_exists("Employee Leave Approver"):
+ return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
|
Check if table exists else return
|
## Code Before:
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
## Instruction:
Check if table exists else return
## Code After:
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
|
import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
+
+ if not frappe.db.table_exists("Employee Leave Approver"):
+ return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()
|
31d0af7d5f3a984d4f6c7be62d599553a3bc7c08
|
opps/articles/utils.py
|
opps/articles/utils.py
|
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
context['channel']['root'] = self.channel.get_root()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
Add channel root on set context data, sent to template
|
Add channel root on set context data, sent to template
|
Python
|
mit
|
YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps
|
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
+ context['channel']['root'] = self.channel.get_root()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
Add channel root on set context data, sent to template
|
## Code Before:
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
## Instruction:
Add channel root on set context data, sent to template
## Code After:
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
context['channel']['root'] = self.channel.get_root()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
+ context['channel']['root'] = self.channel.get_root()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
22855458c7c683353f2ed7b577289b63da8bc9c6
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
raise ValueError('There record without power values. Check what is happening.')
return power_rec
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
# raise ValueError('There record without power values. Check what is happening.')
# We put the value to 0 since that it will not influence
# the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
Solve the issue of the power got disconnected during the ride
|
Solve the issue of the power got disconnected during the ride
|
Python
|
mit
|
glemaitre/power-profile,glemaitre/power-profile,clemaitre58/power-profile,clemaitre58/power-profile
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
- raise ValueError('There record without power values. Check what is happening.')
+ # raise ValueError('There record without power values. Check what is happening.')
+ # We put the value to 0 since that it will not influence
+ # the computation of the RPP
+ power_rec[idx_rec] = 0.
return power_rec
|
Solve the issue of the power got disconnected during the ride
|
## Code Before:
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
raise ValueError('There record without power values. Check what is happening.')
return power_rec
## Instruction:
Solve the issue of the power got disconnected during the ride
## Code After:
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
# raise ValueError('There record without power values. Check what is happening.')
# We put the value to 0 since that it will not influence
# the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
# Check that the filename has the good extension
if filename.endswith('.fit') is not True:
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
- raise ValueError('There record without power values. Check what is happening.')
+ # raise ValueError('There record without power values. Check what is happening.')
? ++
+ # We put the value to 0 since that it will not influence
+ # the computation of the RPP
+ power_rec[idx_rec] = 0.
return power_rec
|
da619869c8d321863a1cc081189ebda79e1b5dbc
|
djclick/test/test_params.py
|
djclick/test/test_params.py
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
Fix a check for specific formatting of an error message
|
tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.
|
Python
|
mit
|
GaretJax/django-click
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
+ assert e.match(
- # Use `.endswith()` because of differences between CPython and pypy
- assert str(e).endswith(
- 'BadParameter: could not find testapp.DummyModel with {}'.format(
+ 'could not find testapp.DummyModel with {}'.format(error_message))
- error_message))
|
Fix a check for specific formatting of an error message
|
## Code Before:
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
## Instruction:
Fix a check for specific formatting of an error message
## Code After:
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
+ assert e.match(
- # Use `.endswith()` because of differences between CPython and pypy
- assert str(e).endswith(
- 'BadParameter: could not find testapp.DummyModel with {}'.format(
? --------------
+ 'could not find testapp.DummyModel with {}'.format(error_message))
? +++++++++++++++
- error_message))
|
018baf83b5293799c8f79652c902aa0fa752161e
|
pysswords/credential.py
|
pysswords/credential.py
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
Reformat string representation of Credentials
|
Reformat string representation of Credentials
|
Python
|
mit
|
eiginn/passpie,marcwebbie/passpie,marcwebbie/pysswords,scorphus/passpie,eiginn/passpie,marcwebbie/passpie,scorphus/passpie
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
- return "<Credential: {}, {}, {}>".format(
+ return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
Reformat string representation of Credentials
|
## Code Before:
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
## Instruction:
Reformat string representation of Credentials
## Code After:
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
- return "<Credential: {}, {}, {}>".format(
+ return "<Credential: name={}, login={}, password='...', {}>".format(
? +++++ ++++++ ++++++++++++++++
self.name,
self.login,
self.comments
)
|
683257082b9e2d0aba27e6124cd419a4cf19d2a9
|
docupload/htmlify.py
|
docupload/htmlify.py
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
Remove tmp file after conversion
|
Remove tmp file after conversion
|
Python
|
mit
|
vaibhawW/oksp,vaibhawW/oksp
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
+ import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
+ html = pypandoc.convert(tmp_loc, 'html5')
+ os.remove(tmp_loc)
- return pypandoc.convert(tmp_loc, 'html5')
+ return html
+
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
Remove tmp file after conversion
|
## Code Before:
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
## Instruction:
Remove tmp file after conversion
## Code After:
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
+ import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
+ html = pypandoc.convert(tmp_loc, 'html5')
+ os.remove(tmp_loc)
- return pypandoc.convert(tmp_loc, 'html5')
+ return html
+
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
858f993ceffb497bee12457d1d4102339af410a4
|
typer/__init__.py
|
typer/__init__.py
|
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
BadArgumentUsage,
BadOptionUsage,
BadParameter,
ClickException,
FileError,
MissingParameter,
NoSuchOption,
UsageError,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_os_args,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
Exit,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
Clean exports from typer, remove unneeded Click components
|
:fire: Clean exports from typer, remove unneeded Click components
and add Exit exception
|
Python
|
mit
|
tiangolo/typer,tiangolo/typer
|
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
+ Exit,
- BadArgumentUsage,
- BadOptionUsage,
- BadParameter,
- ClickException,
- FileError,
- MissingParameter,
- NoSuchOption,
- UsageError,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
- get_os_args,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
Clean exports from typer, remove unneeded Click components
|
## Code Before:
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
BadArgumentUsage,
BadOptionUsage,
BadParameter,
ClickException,
FileError,
MissingParameter,
NoSuchOption,
UsageError,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_os_args,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
## Instruction:
Clean exports from typer, remove unneeded Click components
## Code After:
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
Exit,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
+ Exit,
- BadArgumentUsage,
- BadOptionUsage,
- BadParameter,
- ClickException,
- FileError,
- MissingParameter,
- NoSuchOption,
- UsageError,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
- get_os_args,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
a43e1c76ba3bef9ab3cbe1353c3b7289031a3b64
|
pydub/playback.py
|
pydub/playback.py
|
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
def play(audio_segment):
with NamedTemporaryFile("w+b", suffix=".wav") as f:
audio_segment.export(f.name, "wav")
subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
|
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
def _play_with_ffplay(seg):
with NamedTemporaryFile("w+b", suffix=".wav") as f:
seg.export(f.name, "wav")
subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
def _play_with_pyaudio(seg):
import pyaudio
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(seg.sample_width),
channels=seg.channels,
rate=seg.frame_rate,
output=True)
stream.write(seg._data)
stream.stop_stream()
stream.close()
p.terminate()
def play(audio_segment):
try:
import pyaudio
_play_with_pyaudio(audio_segment)
except ImportError:
_play_with_ffplay(audio_segment)
|
Use Pyaudio when available, ffplay as fallback
|
Use Pyaudio when available, ffplay as fallback
|
Python
|
mit
|
cbelth/pyMusic,miguelgrinberg/pydub,jiaaro/pydub,Geoion/pydub,joshrobo/pydub,sgml/pydub
|
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
+
+
+ def _play_with_ffplay(seg):
+ with NamedTemporaryFile("w+b", suffix=".wav") as f:
+ seg.export(f.name, "wav")
+ subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
+
+
+ def _play_with_pyaudio(seg):
+ import pyaudio
+
+ p = pyaudio.PyAudio()
+ stream = p.open(format=p.get_format_from_width(seg.sample_width),
+ channels=seg.channels,
+ rate=seg.frame_rate,
+ output=True)
+
+ stream.write(seg._data)
+ stream.stop_stream()
+ stream.close()
+
+ p.terminate()
+
+
def play(audio_segment):
- with NamedTemporaryFile("w+b", suffix=".wav") as f:
- audio_segment.export(f.name, "wav")
- subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
+ try:
+ import pyaudio
+ _play_with_pyaudio(audio_segment)
+ except ImportError:
+ _play_with_ffplay(audio_segment)
+
|
Use Pyaudio when available, ffplay as fallback
|
## Code Before:
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
def play(audio_segment):
with NamedTemporaryFile("w+b", suffix=".wav") as f:
audio_segment.export(f.name, "wav")
subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
## Instruction:
Use Pyaudio when available, ffplay as fallback
## Code After:
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
def _play_with_ffplay(seg):
with NamedTemporaryFile("w+b", suffix=".wav") as f:
seg.export(f.name, "wav")
subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
def _play_with_pyaudio(seg):
import pyaudio
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(seg.sample_width),
channels=seg.channels,
rate=seg.frame_rate,
output=True)
stream.write(seg._data)
stream.stop_stream()
stream.close()
p.terminate()
def play(audio_segment):
try:
import pyaudio
_play_with_pyaudio(audio_segment)
except ImportError:
_play_with_ffplay(audio_segment)
|
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name
PLAYER = get_player_name()
+
+
+ def _play_with_ffplay(seg):
+ with NamedTemporaryFile("w+b", suffix=".wav") as f:
+ seg.export(f.name, "wav")
+ subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
+
+
+ def _play_with_pyaudio(seg):
+ import pyaudio
+
+ p = pyaudio.PyAudio()
+ stream = p.open(format=p.get_format_from_width(seg.sample_width),
+ channels=seg.channels,
+ rate=seg.frame_rate,
+ output=True)
+
+ stream.write(seg._data)
+ stream.stop_stream()
+ stream.close()
+
+ p.terminate()
+
+
def play(audio_segment):
- with NamedTemporaryFile("w+b", suffix=".wav") as f:
- audio_segment.export(f.name, "wav")
- subprocess.call([PLAYER, "-nodisp", "-autoexit", f.name])
+ try:
+ import pyaudio
+ _play_with_pyaudio(audio_segment)
+ except ImportError:
+ _play_with_ffplay(audio_segment)
+
|
91ffbe22e56387491775a569e237c4e46495c6a9
|
nyuki/workflow/tasks/task_selector.py
|
nyuki/workflow/tasks/task_selector.py
|
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
self._workflow.set_next_tasks(condition['tasks'])
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
|
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
if condition['rules']:
self._workflow.set_next_tasks(condition['rules'][0]['tasks'])
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
|
Fix an issue with the child-task selector.
|
Fix an issue with the child-task selector.
|
Python
|
apache-2.0
|
optiflows/nyuki,gdraynz/nyuki,optiflows/nyuki,gdraynz/nyuki
|
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
+ if condition['rules']:
- self._workflow.set_next_tasks(condition['tasks'])
+ self._workflow.set_next_tasks(condition['rules'][0]['tasks'])
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
|
Fix an issue with the child-task selector.
|
## Code Before:
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
self._workflow.set_next_tasks(condition['tasks'])
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
## Instruction:
Fix an issue with the child-task selector.
## Code After:
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
if condition['rules']:
self._workflow.set_next_tasks(condition['rules'][0]['tasks'])
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
|
import logging
from tukio import Workflow
from tukio.task import register
from tukio.task.holder import TaskHolder
from nyuki.utils.evaluate import ConditionBlock
from nyuki.workflow.tasks.utils import generate_schema
log = logging.getLogger(__name__)
class TaskConditionBlock(ConditionBlock):
"""
Overrides work on ConditionBlock from the factory task to
set next workflow tasks.
"""
def __init__(self, conditions, workflow):
super().__init__(conditions)
self._workflow = workflow
def condition_validated(self, condition, data):
"""
Set next workflow tasks upon validating a condition.
"""
+ if condition['rules']:
- self._workflow.set_next_tasks(condition['tasks'])
+ self._workflow.set_next_tasks(condition['rules'][0]['tasks'])
? ++++ ++++++++++++
@register('task_selector', 'execute')
class TaskSelector(TaskHolder):
SCHEMA = generate_schema(tasks={
'type': 'object',
'properties': {
'type': {'type': 'string', 'enum': ['task-selector']},
'tasks': {
'type': 'array',
'items': {
'type': 'string',
'minLength': 1,
'uniqueItems': True
}
}
}
})
async def execute(self, event):
data = event.data
workflow = Workflow.current_workflow()
for block in self.config['rules']:
if block['type'] == 'task-selector':
workflow.set_next_tasks(block['tasks'])
elif block['type'] == 'condition-block':
TaskConditionBlock(block['conditions'], workflow).apply(data)
return data
|
e2f83a6a5d43ebc52d03d4059a7526a579a425c1
|
darkoob/social/models.py
|
darkoob/social/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
Set User Profile Unicode Function
|
Set User Profile Unicode Function
|
Python
|
mit
|
s1na/darkoob,s1na/darkoob,s1na/darkoob
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
+ def __unicode__(self):
+ return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
Set User Profile Unicode Function
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
## Instruction:
Set User Profile Unicode Function
## Code After:
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
+ def __unicode__(self):
+ return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.