Dataset schema (column name, type, and observed value range):

| column | type | values |
| --- | --- | --- |
| commit | string | length 40-40 |
| old_file | string | length 4-118 |
| new_file | string | length 4-118 |
| old_contents | string | length 10-2.94k |
| new_contents | string | length 21-3.18k |
| subject | string | length 16-444 |
| message | string | length 17-2.63k |
| lang | string (categorical) | 1 distinct value |
| license | string (categorical) | 13 distinct values |
| repos | string | length 5-43k |
| ndiff | string | length 52-3.32k |
| instruction | string | length 16-444 |
| content | string | length 133-4.32k |
| fuzzy_diff | string | length 16-3.18k |
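Each record below lists these fourteen fields in schema order, one field per line, with the multi-line cells (old_contents, new_contents, ndiff, content, fuzzy_diff) shown with their newlines collapsed. As a minimal sketch of working with a dump in this shape, the snippet below loads it with the Hugging Face `datasets` library and splits a `content` cell back into its three sections; the dataset identifier is a hypothetical placeholder, not the real repository name:

```python
# Sketch only: the dataset id below is a placeholder, not a real repo.
from datasets import load_dataset  # pip install datasets

ds = load_dataset("your-org/commit-edit-corpus", split="train")  # placeholder id

def split_content(content: str):
    """Split a `content` cell into (code_before, instruction, code_after),
    using the '## Code Before:' / '## Instruction:' / '## Code After:'
    markers visible in the records below."""
    before, rest = content.split("## Instruction:", 1)
    instruction, after = rest.split("## Code After:", 1)
    return (before.replace("## Code Before:", "", 1).strip(),
            instruction.strip(),
            after.strip())

code_before, instruction, code_after = split_content(ds[0]["content"])
print(instruction)  # e.g. "Update what a transaction is"
```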
633c3a356a0ed88c00fbb1a5c972171de2255890
dinosaurs/transaction/database.py
dinosaurs/transaction/database.py
from peewee import * db = SqliteDatabase('emails.db') class Transaction(Model): cost = FloatField() address = CharField() tempPass = CharField() domain = CharField(index=True) email = CharField(primary_key=True, unique=True) is_complete = BooleanField(default=False, index=True) class Meta: database = db
from datetime import datetime from peewee import * from dinosaurs import settings from dinosaurs.transaction.coin import generate_address db = SqliteDatabase(settings.database) class Transaction(Model): cost = FloatField() address = CharField() started = DateField() tempPass = CharField() domain = CharField(index=True) email = CharField(primary_key=True, unique=True) is_complete = BooleanField(default=False, index=True) class Meta: database = db def __init__(self, *args, **kwargs): kwargs['started'] = datetime.now() kwargs['address'] = generate_address() super(Transaction, self).__init__(*args, **kwargs) @property def expired(self): return (datetime.now() - self.started).minutes > 4 @property def seconds_left(self): return (datetime.now() - self.started).total_seconds
Update what a transaction is
Update what a transaction is
Python
mit
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
+ from datetime import datetime + from peewee import * + from dinosaurs import settings + from dinosaurs.transaction.coin import generate_address + + - db = SqliteDatabase('emails.db') + db = SqliteDatabase(settings.database) class Transaction(Model): cost = FloatField() address = CharField() + started = DateField() tempPass = CharField() domain = CharField(index=True) email = CharField(primary_key=True, unique=True) is_complete = BooleanField(default=False, index=True) class Meta: database = db + def __init__(self, *args, **kwargs): + kwargs['started'] = datetime.now() + kwargs['address'] = generate_address() + super(Transaction, self).__init__(*args, **kwargs) + + @property + def expired(self): + return (datetime.now() - self.started).minutes > 4 + + @property + def seconds_left(self): + return (datetime.now() - self.started).total_seconds +
Update what a transaction is
## Code Before: from peewee import * db = SqliteDatabase('emails.db') class Transaction(Model): cost = FloatField() address = CharField() tempPass = CharField() domain = CharField(index=True) email = CharField(primary_key=True, unique=True) is_complete = BooleanField(default=False, index=True) class Meta: database = db ## Instruction: Update what a transaction is ## Code After: from datetime import datetime from peewee import * from dinosaurs import settings from dinosaurs.transaction.coin import generate_address db = SqliteDatabase(settings.database) class Transaction(Model): cost = FloatField() address = CharField() started = DateField() tempPass = CharField() domain = CharField(index=True) email = CharField(primary_key=True, unique=True) is_complete = BooleanField(default=False, index=True) class Meta: database = db def __init__(self, *args, **kwargs): kwargs['started'] = datetime.now() kwargs['address'] = generate_address() super(Transaction, self).__init__(*args, **kwargs) @property def expired(self): return (datetime.now() - self.started).minutes > 4 @property def seconds_left(self): return (datetime.now() - self.started).total_seconds
... from datetime import datetime from peewee import * ... from dinosaurs import settings from dinosaurs.transaction.coin import generate_address db = SqliteDatabase(settings.database) ... address = CharField() started = DateField() tempPass = CharField() ... database = db def __init__(self, *args, **kwargs): kwargs['started'] = datetime.now() kwargs['address'] = generate_address() super(Transaction, self).__init__(*args, **kwargs) @property def expired(self): return (datetime.now() - self.started).minutes > 4 @property def seconds_left(self): return (datetime.now() - self.started).total_seconds ...
b1c67321e5eec29b9fd91d728bd8e63382dc063a
src/keybar/conf/test.py
src/keybar/conf/test.py
from keybar.conf.base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'keybar_test', } } certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates') KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert') KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key') KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert') KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key') KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt') KEYBAR_VERIFY_CLIENT_CERTIFICATE = True KEYBAR_DOMAIN = 'local.keybar.io' KEYBAR_HOST = 'local.keybar.io:8443' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) CELERY_ALWAYS_EAGER = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True KEYBAR_HOST = 'local.keybar.io:9999' KEYBAR_KDF_ITERATIONS = 100
from keybar.conf.base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'keybar_test', } } certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates') KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert') KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key') KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert') KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key') KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt') KEYBAR_VERIFY_CLIENT_CERTIFICATE = True KEYBAR_DOMAIN = 'local.keybar.io' KEYBAR_HOST = 'local.keybar.io:9999' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) CELERY_ALWAYS_EAGER = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True KEYBAR_KDF_ITERATIONS = 100
Remove duplicate keybar host value
Remove duplicate keybar host value
Python
bsd-3-clause
keybar/keybar
from keybar.conf.base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'keybar_test', } } certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates') KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert') KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key') KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert') KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key') KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt') KEYBAR_VERIFY_CLIENT_CERTIFICATE = True KEYBAR_DOMAIN = 'local.keybar.io' - KEYBAR_HOST = 'local.keybar.io:8443' + KEYBAR_HOST = 'local.keybar.io:9999' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) CELERY_ALWAYS_EAGER = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True - KEYBAR_HOST = 'local.keybar.io:9999' KEYBAR_KDF_ITERATIONS = 100
Remove duplicate keybar host value
## Code Before: from keybar.conf.base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'keybar_test', } } certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates') KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert') KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key') KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert') KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key') KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt') KEYBAR_VERIFY_CLIENT_CERTIFICATE = True KEYBAR_DOMAIN = 'local.keybar.io' KEYBAR_HOST = 'local.keybar.io:8443' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) CELERY_ALWAYS_EAGER = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True KEYBAR_HOST = 'local.keybar.io:9999' KEYBAR_KDF_ITERATIONS = 100 ## Instruction: Remove duplicate keybar host value ## Code After: from keybar.conf.base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'keybar_test', } } certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates') KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert') KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key') KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert') KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key') KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt') KEYBAR_VERIFY_CLIENT_CERTIFICATE = True KEYBAR_DOMAIN = 'local.keybar.io' KEYBAR_HOST = 'local.keybar.io:9999' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) CELERY_ALWAYS_EAGER = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True KEYBAR_KDF_ITERATIONS = 100
# ... existing code ... KEYBAR_DOMAIN = 'local.keybar.io' KEYBAR_HOST = 'local.keybar.io:9999' # ... modified code ... # ... rest of the code ...
6dadf50366b1e142f96ef3bf4a356f7aa98f37be
geokey_export/__init__.py
geokey_export/__init__.py
from geokey.extensions.base import register VERSION = (0, 2, 0) __version__ = '.'.join(map(str, VERSION)) register( 'geokey_export', 'Export', display_admin=True, superuser=False, version=__version__ )
from geokey.extensions.base import register VERSION = (0, 2, 0) __version__ = '.'.join(map(str, VERSION)) register( 'geokey_export', 'Export', display_admin=True, superuser=False )
Undo previous commit (should not been on new branch) - Sorry!
Undo previous commit (should not been on new branch) - Sorry! Signed-off-by: Matthias Stevens <[email protected]>
Python
mit
ExCiteS/geokey-export,ExCiteS/geokey-export,ExCiteS/geokey-export
from geokey.extensions.base import register VERSION = (0, 2, 0) __version__ = '.'.join(map(str, VERSION)) register( 'geokey_export', 'Export', display_admin=True, - superuser=False, + superuser=False - version=__version__ )
Undo previous commit (should not been on new branch) - Sorry!
## Code Before: from geokey.extensions.base import register VERSION = (0, 2, 0) __version__ = '.'.join(map(str, VERSION)) register( 'geokey_export', 'Export', display_admin=True, superuser=False, version=__version__ ) ## Instruction: Undo previous commit (should not been on new branch) - Sorry! ## Code After: from geokey.extensions.base import register VERSION = (0, 2, 0) __version__ = '.'.join(map(str, VERSION)) register( 'geokey_export', 'Export', display_admin=True, superuser=False )
// ... existing code ... display_admin=True, superuser=False ) // ... rest of the code ...
b24fa6443e70cca01ff5059fe29ba6e33c0262ea
pylisp/packet/ip/protocol.py
pylisp/packet/ip/protocol.py
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes())
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
Split Protocol class in Protocol and ProtocolElement
Split Protocol class in Protocol and ProtocolElement
Python
bsd-3-clause
steffann/pylisp
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta - class Protocol(object): + class ProtocolElement(object): __metaclass__ = ABCMeta - header_type = None - @abstractmethod - def __init__(self, next_header=None, payload=''): + def __init__(self): ''' Constructor ''' - self.next_header = next_header - self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) + + def __str__(self): + return str(self.to_bytes()) + + def __bytes__(self): + return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' - def __str__(self): - return str(self.to_bytes()) - def __bytes__(self): - return bytes(self.to_bytes()) + class Protocol(ProtocolElement): + __metaclass__ = ABCMeta + header_type = None + + @abstractmethod + def __init__(self, next_header=None, payload=''): + ''' + Constructor + ''' + super(Protocol, self).__init__() + self.next_header = next_header + self.payload = payload +
Split Protocol class in Protocol and ProtocolElement
## Code Before: ''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes()) ## Instruction: Split Protocol class in Protocol and ProtocolElement ## Code After: ''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
# ... existing code ... class ProtocolElement(object): __metaclass__ = ABCMeta # ... modified code ... @abstractmethod def __init__(self): ''' ... ''' ... ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() ... class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload # ... rest of the code ...
0683e4fb0431563758d93b39d102d1c634a4535b
run.py
run.py
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subjects') ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subject') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
Change the subject url from /quip/subjects to /quip/subject.
Change the subject url from /quip/subjects to /quip/subject.
Python
bsd-2-clause
ohsu-qin/qiprofile-rest,ohsu-qin/qirest
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. - ext.add_model(models.Subject, url='subjects') + ext.add_model(models.Subject, url='subject') - ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
Change the subject url from /quip/subjects to /quip/subject.
## Code Before: import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subjects') ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run() ## Instruction: Change the subject url from /quip/subjects to /quip/subject. ## Code After: import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subject') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
... # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subject') ext.add_model(models.SessionDetail, url='session-detail') ...
555536b93609ab3b1c29475d51408aaf7eda4675
cray_test.py
cray_test.py
'''module for unit test and task for CI''' import sys import unittest from yatest import testpost, testpage, testutility, testconfig if __name__ == '__main__': all_test_suites = [] all_test_suites.append(testpost.get_test_suites()) all_test_suites.append(testpage.get_test_suites()) all_test_suites.append(testutility.get_test_suites()) all_test_suites.append(testconfig.get_test_suites()) alltests = unittest.TestSuite(all_test_suites) status = not unittest.TextTestRunner(verbosity=2).run(alltests).wasSuccessful() sys.exit(status)
'''module for unit test and task for CI''' import sys import unittest from yatest import testpost, testpage, testutility, testconfig, testgenerator, testpostmanager if __name__ == '__main__': all_test_suites = [] all_test_suites.append(testpost.get_test_suites()) all_test_suites.append(testpage.get_test_suites()) all_test_suites.append(testutility.get_test_suites()) all_test_suites.append(testconfig.get_test_suites()) all_test_suites.append(testgenerator.get_test_suites()) all_test_suites.append(testpostmanager.get_test_suites()) alltests = unittest.TestSuite(all_test_suites) status = not unittest.TextTestRunner(verbosity=2).run(alltests).wasSuccessful() sys.exit(status)
Add new added test cases to travis.
Add new added test cases to travis.
Python
mit
boluny/cray,boluny/cray
'''module for unit test and task for CI''' import sys import unittest - from yatest import testpost, testpage, testutility, testconfig + from yatest import testpost, testpage, testutility, testconfig, testgenerator, testpostmanager if __name__ == '__main__': all_test_suites = [] all_test_suites.append(testpost.get_test_suites()) all_test_suites.append(testpage.get_test_suites()) all_test_suites.append(testutility.get_test_suites()) all_test_suites.append(testconfig.get_test_suites()) + all_test_suites.append(testgenerator.get_test_suites()) + all_test_suites.append(testpostmanager.get_test_suites()) alltests = unittest.TestSuite(all_test_suites) status = not unittest.TextTestRunner(verbosity=2).run(alltests).wasSuccessful() sys.exit(status)
Add new added test cases to travis.
## Code Before: '''module for unit test and task for CI''' import sys import unittest from yatest import testpost, testpage, testutility, testconfig if __name__ == '__main__': all_test_suites = [] all_test_suites.append(testpost.get_test_suites()) all_test_suites.append(testpage.get_test_suites()) all_test_suites.append(testutility.get_test_suites()) all_test_suites.append(testconfig.get_test_suites()) alltests = unittest.TestSuite(all_test_suites) status = not unittest.TextTestRunner(verbosity=2).run(alltests).wasSuccessful() sys.exit(status) ## Instruction: Add new added test cases to travis. ## Code After: '''module for unit test and task for CI''' import sys import unittest from yatest import testpost, testpage, testutility, testconfig, testgenerator, testpostmanager if __name__ == '__main__': all_test_suites = [] all_test_suites.append(testpost.get_test_suites()) all_test_suites.append(testpage.get_test_suites()) all_test_suites.append(testutility.get_test_suites()) all_test_suites.append(testconfig.get_test_suites()) all_test_suites.append(testgenerator.get_test_suites()) all_test_suites.append(testpostmanager.get_test_suites()) alltests = unittest.TestSuite(all_test_suites) status = not unittest.TextTestRunner(verbosity=2).run(alltests).wasSuccessful() sys.exit(status)
... from yatest import testpost, testpage, testutility, testconfig, testgenerator, testpostmanager ... all_test_suites.append(testconfig.get_test_suites()) all_test_suites.append(testgenerator.get_test_suites()) all_test_suites.append(testpostmanager.get_test_suites()) ...
0e69efcd3a6992d0a34d7ecb80a76c3fbc52975c
pikka_bird_collector/sender.py
pikka_bird_collector/sender.py
import datetime import logging import json import urllib.parse import requests class Sender(): SERVER_SERVICES = { 'collections': '/collections'} REQUEST_HEADERS = { 'Content-Type': 'application/json'} def __init__(self, server_uri, logger=None): self.server_uri = server_uri self.logger = logger or logging.getLogger() def send(self, collection): url = self.__service_url('collections') data = json.dumps(collection) t_0 = datetime.datetime.utcnow() self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8'))))) try: r = requests.post(url, data=data, headers=self.REQUEST_HEADERS) r.raise_for_status() logger = self.logger.info except requests.exceptions.HTTPError: logger = self.logger.error t = datetime.datetime.utcnow() logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds)) def __service_url(self, service): service_path = self.SERVER_SERVICES[service] return urllib.parse.urljoin(self.server_uri, service_path)
import datetime import logging import json import urllib.parse import requests class Sender(): SERVER_SERVICES = { 'collections': '/collections'} REQUEST_HEADERS = { 'Content-Type': 'application/json'} def __init__(self, server_uri, logger=None): self.server_uri = server_uri self.logger = logger or logging.getLogger() def send(self, collection): url = self.__service_url('collections') data = json.dumps(collection) t_0 = datetime.datetime.utcnow() self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8'))))) try: r = requests.post(url, data=data, headers=self.REQUEST_HEADERS) r.raise_for_status() status = True except requests.exceptions.HTTPError: status = False except requests.exceptions.ConnectionError: self.logger.error("CONNECTION FAILED") return False logger = self.logger.info if status else self.logger.error try: text = r.text except ValueError: text = None t = datetime.datetime.utcnow() logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds)) return status def __service_url(self, service): service_path = self.SERVER_SERVICES[service] return urllib.parse.urljoin(self.server_uri, service_path)
Extend Sender.send() to return boolean status.
Extend Sender.send() to return boolean status.
Python
mit
tiredpixel/pikka-bird-collector-py
import datetime import logging import json import urllib.parse import requests class Sender(): SERVER_SERVICES = { 'collections': '/collections'} REQUEST_HEADERS = { 'Content-Type': 'application/json'} def __init__(self, server_uri, logger=None): self.server_uri = server_uri self.logger = logger or logging.getLogger() def send(self, collection): url = self.__service_url('collections') data = json.dumps(collection) t_0 = datetime.datetime.utcnow() self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8'))))) try: r = requests.post(url, data=data, headers=self.REQUEST_HEADERS) r.raise_for_status() - logger = self.logger.info + status = True except requests.exceptions.HTTPError: - logger = self.logger.error + status = False + except requests.exceptions.ConnectionError: + self.logger.error("CONNECTION FAILED") + return False + + logger = self.logger.info if status else self.logger.error + + try: + text = r.text + except ValueError: + text = None t = datetime.datetime.utcnow() - logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds)) + logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds)) + + return status def __service_url(self, service): service_path = self.SERVER_SERVICES[service] return urllib.parse.urljoin(self.server_uri, service_path)
Extend Sender.send() to return boolean status.
## Code Before: import datetime import logging import json import urllib.parse import requests class Sender(): SERVER_SERVICES = { 'collections': '/collections'} REQUEST_HEADERS = { 'Content-Type': 'application/json'} def __init__(self, server_uri, logger=None): self.server_uri = server_uri self.logger = logger or logging.getLogger() def send(self, collection): url = self.__service_url('collections') data = json.dumps(collection) t_0 = datetime.datetime.utcnow() self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8'))))) try: r = requests.post(url, data=data, headers=self.REQUEST_HEADERS) r.raise_for_status() logger = self.logger.info except requests.exceptions.HTTPError: logger = self.logger.error t = datetime.datetime.utcnow() logger("SENT %d %s (%s s)" % (r.status_code, r.text, (t - t_0).seconds)) def __service_url(self, service): service_path = self.SERVER_SERVICES[service] return urllib.parse.urljoin(self.server_uri, service_path) ## Instruction: Extend Sender.send() to return boolean status. ## Code After: import datetime import logging import json import urllib.parse import requests class Sender(): SERVER_SERVICES = { 'collections': '/collections'} REQUEST_HEADERS = { 'Content-Type': 'application/json'} def __init__(self, server_uri, logger=None): self.server_uri = server_uri self.logger = logger or logging.getLogger() def send(self, collection): url = self.__service_url('collections') data = json.dumps(collection) t_0 = datetime.datetime.utcnow() self.logger.info("SENDING %s (%d b)" % (url, (len(data.encode('utf-8'))))) try: r = requests.post(url, data=data, headers=self.REQUEST_HEADERS) r.raise_for_status() status = True except requests.exceptions.HTTPError: status = False except requests.exceptions.ConnectionError: self.logger.error("CONNECTION FAILED") return False logger = self.logger.info if status else self.logger.error try: text = r.text except ValueError: text = None t = datetime.datetime.utcnow() logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds)) return status def __service_url(self, service): service_path = self.SERVER_SERVICES[service] return urllib.parse.urljoin(self.server_uri, service_path)
... r.raise_for_status() status = True except requests.exceptions.HTTPError: status = False except requests.exceptions.ConnectionError: self.logger.error("CONNECTION FAILED") return False logger = self.logger.info if status else self.logger.error try: text = r.text except ValueError: text = None ... t = datetime.datetime.utcnow() logger("SENT %d %s (%s s)" % (r.status_code, text, (t - t_0).seconds)) return status ...
99be8919a0bc274dc311ebe3201dfc490a1d0d07
setup.py
setup.py
import os from distutils.core import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.test"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
import os from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
Remove find_packages import, it's not in distutils
Remove find_packages import, it's not in distutils
Python
bsd-2-clause
blaze/datashape,cowlicks/datashape,ContinuumIO/datashape,cpcloud/datashape,aterrel/datashape,quantopian/datashape,FrancescAlted/datashape,quantopian/datashape,aterrel/datashape,cowlicks/datashape,markflorisson/datashape,ContinuumIO/datashape,cpcloud/datashape,blaze/datashape,llllllllll/datashape,markflorisson/datashape,FrancescAlted/datashape,llllllllll/datashape
import os - from distutils.core import setup, find_packages + from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", - packages = ["datashape", "datashape.test"], + packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
Remove find_packages import, it's not in distutils
## Code Before: import os from distutils.core import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.test"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], ) ## Instruction: Remove find_packages import, it's not in distutils ## Code After: import os from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
// ... existing code ... import os from distutils.core import setup // ... modified code ... url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), // ... rest of the code ...
248fda4f499375b24a2f670569259f0904948b7e
troposphere/detective.py
troposphere/detective.py
from . import AWSObject from .validators import boolean class Graph(AWSObject): resource_type = "AWS::Detective::Graph" props = {} class MemberInvitation(AWSObject): resource_type = "AWS::Detective::MemberInvitation" props = { "DisableEmailNotification": (boolean, False), "GraphArn": (str, True), "MemberEmailAddress": (str, True), "MemberId": (str, True), "Message": (str, False), }
from . import AWSObject, Tags from .validators import boolean class Graph(AWSObject): resource_type = "AWS::Detective::Graph" props = { "Tags": (Tags, False), } class MemberInvitation(AWSObject): resource_type = "AWS::Detective::MemberInvitation" props = { "DisableEmailNotification": (boolean, False), "GraphArn": (str, True), "MemberEmailAddress": (str, True), "MemberId": (str, True), "Message": (str, False), }
Update Detective per 2021-04-29 changes
Update Detective per 2021-04-29 changes
Python
bsd-2-clause
cloudtools/troposphere,cloudtools/troposphere
- from . import AWSObject + from . import AWSObject, Tags from .validators import boolean class Graph(AWSObject): resource_type = "AWS::Detective::Graph" - props = {} + props = { + "Tags": (Tags, False), + } class MemberInvitation(AWSObject): resource_type = "AWS::Detective::MemberInvitation" props = { "DisableEmailNotification": (boolean, False), "GraphArn": (str, True), "MemberEmailAddress": (str, True), "MemberId": (str, True), "Message": (str, False), }
Update Detective per 2021-04-29 changes
## Code Before: from . import AWSObject from .validators import boolean class Graph(AWSObject): resource_type = "AWS::Detective::Graph" props = {} class MemberInvitation(AWSObject): resource_type = "AWS::Detective::MemberInvitation" props = { "DisableEmailNotification": (boolean, False), "GraphArn": (str, True), "MemberEmailAddress": (str, True), "MemberId": (str, True), "Message": (str, False), } ## Instruction: Update Detective per 2021-04-29 changes ## Code After: from . import AWSObject, Tags from .validators import boolean class Graph(AWSObject): resource_type = "AWS::Detective::Graph" props = { "Tags": (Tags, False), } class MemberInvitation(AWSObject): resource_type = "AWS::Detective::MemberInvitation" props = { "DisableEmailNotification": (boolean, False), "GraphArn": (str, True), "MemberEmailAddress": (str, True), "MemberId": (str, True), "Message": (str, False), }
# ... existing code ... from . import AWSObject, Tags from .validators import boolean # ... modified code ... props = { "Tags": (Tags, False), } # ... rest of the code ...
80d710269ff1d6421f4a29b9a0d424868cb5ec54
flaskiwsapp/auth/jwt.py
flaskiwsapp/auth/jwt.py
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE from flask_jwt import JWTError from flask import jsonify def authenticate(email, password): user = get_user_by_email(email) if user and user.check_password(password) and user.is_active: return user def identity(payload): user_id = payload['identity'] return get_user_by_id(user_id) def error_handler(e): response_dict = {'data': {}} error = {} if isinstance(e, JWTError): error.update({'status': e.status_code}) error.update({'title': e.error}) error.update({'detail': 'Auth Failed: {}'.format(e.description)}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE from flask_jwt import JWTError from flask import jsonify from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR def authenticate(email, password): try: user = get_user_by_email(email) if user and user.check_password(password) and user.is_active: return user except UserDoesnotExistsException as e: raise JWTError(error=str(type(e)), description=e.message) def identity(payload): user_id = payload['identity'] return get_user_by_id(user_id) def error_handler(e): response_dict = {'data': {}} error = {} if isinstance(e, JWTError): error.update({'status': e.status_code}) error.update({'title': e.error}) error.update({'detail': 'Auth Failed: {}'.format(e.description)}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers elif isinstance(e, Exception): error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR}) error.update({'title': str(type(e))}) error.update({'detail': 'Auth Failed: {}'.format(e.args[0])}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers
Raise JWTErrror into authentication handler
Raise JWTErrror into authentication handler
Python
mit
rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE from flask_jwt import JWTError from flask import jsonify + from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException + from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR def authenticate(email, password): + try: - user = get_user_by_email(email) + user = get_user_by_email(email) - if user and user.check_password(password) and user.is_active: + if user and user.check_password(password) and user.is_active: - return user + return user + except UserDoesnotExistsException as e: + raise JWTError(error=str(type(e)), description=e.message) def identity(payload): user_id = payload['identity'] return get_user_by_id(user_id) def error_handler(e): response_dict = {'data': {}} error = {} if isinstance(e, JWTError): error.update({'status': e.status_code}) error.update({'title': e.error}) error.update({'detail': 'Auth Failed: {}'.format(e.description)}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers + elif isinstance(e, Exception): + error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR}) + error.update({'title': str(type(e))}) + error.update({'detail': 'Auth Failed: {}'.format(e.args[0])}) + error.update({'code': DUMMY_ERROR_CODE}) + response_dict['data'] = error + return jsonify(response_dict), e.status_code, e.headers
Raise JWTErrror into authentication handler
## Code Before: from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE from flask_jwt import JWTError from flask import jsonify def authenticate(email, password): user = get_user_by_email(email) if user and user.check_password(password) and user.is_active: return user def identity(payload): user_id = payload['identity'] return get_user_by_id(user_id) def error_handler(e): response_dict = {'data': {}} error = {} if isinstance(e, JWTError): error.update({'status': e.status_code}) error.update({'title': e.error}) error.update({'detail': 'Auth Failed: {}'.format(e.description)}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers ## Instruction: Raise JWTErrror into authentication handler ## Code After: from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE from flask_jwt import JWTError from flask import jsonify from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR def authenticate(email, password): try: user = get_user_by_email(email) if user and user.check_password(password) and user.is_active: return user except UserDoesnotExistsException as e: raise JWTError(error=str(type(e)), description=e.message) def identity(payload): user_id = payload['identity'] return get_user_by_id(user_id) def error_handler(e): response_dict = {'data': {}} error = {} if isinstance(e, JWTError): error.update({'status': e.status_code}) error.update({'title': e.error}) error.update({'detail': 'Auth Failed: {}'.format(e.description)}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers elif isinstance(e, Exception): error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR}) error.update({'title': str(type(e))}) error.update({'detail': 'Auth Failed: {}'.format(e.args[0])}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers
// ... existing code ... from flask import jsonify from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR // ... modified code ... def authenticate(email, password): try: user = get_user_by_email(email) if user and user.check_password(password) and user.is_active: return user except UserDoesnotExistsException as e: raise JWTError(error=str(type(e)), description=e.message) ... return jsonify(response_dict), e.status_code, e.headers elif isinstance(e, Exception): error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR}) error.update({'title': str(type(e))}) error.update({'detail': 'Auth Failed: {}'.format(e.args[0])}) error.update({'code': DUMMY_ERROR_CODE}) response_dict['data'] = error return jsonify(response_dict), e.status_code, e.headers // ... rest of the code ...
bdc0466c63347280fbd8bc8c30fb07f294200194
client/third_party/idna/__init__.py
client/third_party/idna/__init__.py
def encode(host, uts46): return unicode(host)
from encodings import idna def encode(host, uts46=False): # pylint: disable=unused-argument # Used by urllib3 return idna.ToASCII(host) def decode(host): # Used by cryptography/hazmat/backends/openssl/x509.py return idna.ToUnicode(host)
Change idna stub to use python's default
[client] Change idna stub to use python's default Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke select bots running inside docker. The new stub is still simpler than https://pypi.org/project/idna/ and lighter weight but much better than ignoring the "xn-" encoding as this was done previously. As per the project home page: This acts as a suitable replacement for the “encodings.idna” module that comes with the Python standard library, but only supports the old, deprecated IDNA specification (RFC 3490). In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal for us. decode() is required by openssl/x509.py. [email protected] Bug: 916644 Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244 Reviewed-by: Marc-Antoine Ruel <[email protected]> Commit-Queue: Marc-Antoine Ruel <[email protected]>
Python
apache-2.0
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
- def encode(host, uts46): - return unicode(host) + from encodings import idna + + + def encode(host, uts46=False): # pylint: disable=unused-argument + # Used by urllib3 + return idna.ToASCII(host) + + + def decode(host): + # Used by cryptography/hazmat/backends/openssl/x509.py + return idna.ToUnicode(host) +
Change idna stub to use python's default
## Code Before: def encode(host, uts46): return unicode(host) ## Instruction: Change idna stub to use python's default ## Code After: from encodings import idna def encode(host, uts46=False): # pylint: disable=unused-argument # Used by urllib3 return idna.ToASCII(host) def decode(host): # Used by cryptography/hazmat/backends/openssl/x509.py return idna.ToUnicode(host)
# ... existing code ... from encodings import idna def encode(host, uts46=False): # pylint: disable=unused-argument # Used by urllib3 return idna.ToASCII(host) def decode(host): # Used by cryptography/hazmat/backends/openssl/x509.py return idna.ToUnicode(host) # ... rest of the code ...
35529cfd3f93723e8d60b43f58419385137b9a01
saltapi/cli.py
saltapi/cli.py
''' CLI entry-point for salt-api ''' # Import salt libs from salt.utils.parsers import ( ConfigDirMixIn, DaemonMixIn, LogLevelMixIn, MergeConfigMixIn, OptionParser, OptionParserMeta, PidfileMixin) # Import salt-api libs import saltapi.client import saltapi.config import saltapi.version class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin, DaemonMixIn, MergeConfigMixIn): ''' The cli parser object used to fire up the salt api system. ''' __metaclass__ = OptionParserMeta VERSION = saltapi.version.__version__ def setup_config(self): return saltapi.config.api_config(self.get_config_file_path('master')) def run(self): ''' Run the api ''' self.parse_args() self.process_config_dir() self.daemonize_if_required() self.set_pidfile() client = saltapi.client.SaltAPIClient(self.config) client.run()
''' CLI entry-point for salt-api ''' # Import salt libs from salt.utils.parsers import ( ConfigDirMixIn, DaemonMixIn, LogLevelMixIn, MergeConfigMixIn, OptionParser, OptionParserMeta, PidfileMixin) # Import salt-api libs import saltapi.client import saltapi.config import saltapi.version class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin, DaemonMixIn, MergeConfigMixIn): ''' The cli parser object used to fire up the salt api system. ''' __metaclass__ = OptionParserMeta VERSION = saltapi.version.__version__ def setup_config(self): return saltapi.config.api_config(self.get_config_file_path('master')) def run(self): ''' Run the api ''' self.parse_args() self.daemonize_if_required() self.set_pidfile() client = saltapi.client.SaltAPIClient(self.config) client.run()
Remove unnecessary call to `process_config_dir()`.
Remove unnecessary call to `process_config_dir()`.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
''' CLI entry-point for salt-api ''' # Import salt libs from salt.utils.parsers import ( ConfigDirMixIn, DaemonMixIn, LogLevelMixIn, MergeConfigMixIn, OptionParser, OptionParserMeta, PidfileMixin) # Import salt-api libs import saltapi.client import saltapi.config import saltapi.version class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin, DaemonMixIn, MergeConfigMixIn): ''' The cli parser object used to fire up the salt api system. ''' __metaclass__ = OptionParserMeta VERSION = saltapi.version.__version__ def setup_config(self): return saltapi.config.api_config(self.get_config_file_path('master')) def run(self): ''' Run the api ''' self.parse_args() - self.process_config_dir() self.daemonize_if_required() self.set_pidfile() client = saltapi.client.SaltAPIClient(self.config) client.run()
Remove unnecessary call to `process_config_dir()`.
## Code Before: ''' CLI entry-point for salt-api ''' # Import salt libs from salt.utils.parsers import ( ConfigDirMixIn, DaemonMixIn, LogLevelMixIn, MergeConfigMixIn, OptionParser, OptionParserMeta, PidfileMixin) # Import salt-api libs import saltapi.client import saltapi.config import saltapi.version class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin, DaemonMixIn, MergeConfigMixIn): ''' The cli parser object used to fire up the salt api system. ''' __metaclass__ = OptionParserMeta VERSION = saltapi.version.__version__ def setup_config(self): return saltapi.config.api_config(self.get_config_file_path('master')) def run(self): ''' Run the api ''' self.parse_args() self.process_config_dir() self.daemonize_if_required() self.set_pidfile() client = saltapi.client.SaltAPIClient(self.config) client.run() ## Instruction: Remove unnecessary call to `process_config_dir()`. ## Code After: ''' CLI entry-point for salt-api ''' # Import salt libs from salt.utils.parsers import ( ConfigDirMixIn, DaemonMixIn, LogLevelMixIn, MergeConfigMixIn, OptionParser, OptionParserMeta, PidfileMixin) # Import salt-api libs import saltapi.client import saltapi.config import saltapi.version class SaltAPI(OptionParser, ConfigDirMixIn, LogLevelMixIn, PidfileMixin, DaemonMixIn, MergeConfigMixIn): ''' The cli parser object used to fire up the salt api system. ''' __metaclass__ = OptionParserMeta VERSION = saltapi.version.__version__ def setup_config(self): return saltapi.config.api_config(self.get_config_file_path('master')) def run(self): ''' Run the api ''' self.parse_args() self.daemonize_if_required() self.set_pidfile() client = saltapi.client.SaltAPIClient(self.config) client.run()
# ... existing code ... self.parse_args() self.daemonize_if_required() # ... rest of the code ...
0d1aa7e08ef2572d2e13218d7d8942d8d2a7550e
app/logic/latexprinter.py
app/logic/latexprinter.py
import sympy from sympy.printing.latex import LatexPrinter class GammaLatexPrinter(LatexPrinter): def _needs_function_brackets(self, expr): if expr.func == sympy.Abs: return False return super(GammaLatexPrinter, self)._needs_function_brackets(expr) def latex(expr, **settings): settings['fold_func_brackets'] = True return GammaLatexPrinter(settings).doprint(expr)
import sympy from sympy.printing.latex import LatexPrinter class GammaLatexPrinter(LatexPrinter): def _needs_function_brackets(self, expr): if expr.func == sympy.Abs: return False return super(GammaLatexPrinter, self)._needs_function_brackets(expr) def latex(expr, **settings): settings['fold_func_brackets'] = True settings['inv_trig_style'] = 'power' return GammaLatexPrinter(settings).doprint(expr)
Print inverse trig functions using powers
Print inverse trig functions using powers
Python
bsd-3-clause
bolshoibooze/sympy_gamma,iScienceLuvr/sympy_gamma,debugger22/sympy_gamma,debugger22/sympy_gamma,iScienceLuvr/sympy_gamma,kaichogami/sympy_gamma,bolshoibooze/sympy_gamma,iScienceLuvr/sympy_gamma,kaichogami/sympy_gamma,bolshoibooze/sympy_gamma,github4ry/sympy_gamma,github4ry/sympy_gamma,github4ry/sympy_gamma,kaichogami/sympy_gamma
import sympy from sympy.printing.latex import LatexPrinter class GammaLatexPrinter(LatexPrinter): def _needs_function_brackets(self, expr): if expr.func == sympy.Abs: return False return super(GammaLatexPrinter, self)._needs_function_brackets(expr) def latex(expr, **settings): settings['fold_func_brackets'] = True + settings['inv_trig_style'] = 'power' return GammaLatexPrinter(settings).doprint(expr)
Print inverse trig functions using powers
## Code Before: import sympy from sympy.printing.latex import LatexPrinter class GammaLatexPrinter(LatexPrinter): def _needs_function_brackets(self, expr): if expr.func == sympy.Abs: return False return super(GammaLatexPrinter, self)._needs_function_brackets(expr) def latex(expr, **settings): settings['fold_func_brackets'] = True return GammaLatexPrinter(settings).doprint(expr) ## Instruction: Print inverse trig functions using powers ## Code After: import sympy from sympy.printing.latex import LatexPrinter class GammaLatexPrinter(LatexPrinter): def _needs_function_brackets(self, expr): if expr.func == sympy.Abs: return False return super(GammaLatexPrinter, self)._needs_function_brackets(expr) def latex(expr, **settings): settings['fold_func_brackets'] = True settings['inv_trig_style'] = 'power' return GammaLatexPrinter(settings).doprint(expr)
# ... existing code ... settings['fold_func_brackets'] = True settings['inv_trig_style'] = 'power' return GammaLatexPrinter(settings).doprint(expr) # ... rest of the code ...
commit: 6e9e6c0fbba6b1f6e97c40181ec58c55e4980995
old_file: pyipmi/fw.py
new_file: pyipmi/fw.py
old_contents:
class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type, self.offset,
                                           self.size, self.flags)


class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None


class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass


class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass


class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass


class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass


class FWStatus(object):
    """Object to hold firmware operation status"""
    pass


class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass


class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass


class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass

new_contents:
class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type, self.offset,
                                           self.size, self.flags)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.slot == other.slot and \
                    self.type == other.type and \
                    self.offset == other.offset and \
                    self.size == other.size and \
                    self.flags == other.flags)
        else:
            return False


class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None


class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass


class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass


class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass


class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass


class FWStatus(object):
    """Object to hold firmware operation status"""
    pass


class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass


class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass


class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass

subject: Add equality operator to FWInfo
message: Add equality operator to FWInfo
lang: Python
license: bsd-3-clause
repos: Cynerva/pyipmi,emaadmanzoor/pyipmi
ndiff:
class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type, self.offset,
                                           self.size, self.flags)
+
+     def __eq__(self, other):
+         if isinstance(other, self.__class__):
+             return (self.slot == other.slot and \
+                     self.type == other.type and \
+                     self.offset == other.offset and \
+                     self.size == other.size and \
+                     self.flags == other.flags)
+         else:
+             return False


class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None


class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass


class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass


class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass


class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass


class FWStatus(object):
    """Object to hold firmware operation status"""
    pass


class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass


class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass


class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass

instruction: Add equality operator to FWInfo
content:
## Code Before:
class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type, self.offset,
                                           self.size, self.flags)


class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None


class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass


class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass


class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass


class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass


class FWStatus(object):
    """Object to hold firmware operation status"""
    pass


class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass


class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass


class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass
## Instruction:
Add equality operator to FWInfo
## Code After:
class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type, self.offset,
                                           self.size, self.flags)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.slot == other.slot and \
                    self.type == other.type and \
                    self.offset == other.offset and \
                    self.size == other.size and \
                    self.flags == other.flags)
        else:
            return False


class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None


class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass


class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass


class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass


class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass


class FWStatus(object):
    """Object to hold firmware operation status"""
    pass


class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass


class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass


class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass

fuzzy_diff:
// ... existing code ...
                                           self.size, self.flags)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.slot == other.slot and \
                    self.type == other.type and \
                    self.offset == other.offset and \
                    self.size == other.size and \
                    self.flags == other.flags)
        else:
            return False
// ... rest of the code ...
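A minimal usage sketch for the `__eq__` added in this record (the field values are hypothetical; `FWInfo` defines no `__init__`, so attributes are assigned directly). One side note: in Python 3, defining `__eq__` without `__hash__` makes instances unhashable, whereas this Python 2 codebase keeps the default hash:

```python
a, b = FWInfo(), FWInfo()
for info in (a, b):
    info.slot, info.type = 0, 2
    info.offset, info.size, info.flags = 0x0, 0x10000, 0x1

assert a == b                 # all five fields compare equal
b.flags = 0x0
assert not (a == b)           # any differing field breaks equality
assert not (a == "not-fw")    # non-FWInfo operands hit the else branch
```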
commit: 82954f3df7e3b8f0a4cb921e40f351938451221d
old_file: cd/lambdas/pipeline-fail-notification/lambda_function.py
new_file: cd/lambdas/pipeline-fail-notification/lambda_function.py
old_contents:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        period_start = datetime.now() - timedelta(seconds=60)

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                exec = action_state['latestExecution']

                if execution['lastStatusChange'] > period_start:
                    if execution['status'] == 'Failed':
                        post_notification(action_state)

        return '...Done'

    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job, 'Function exception: ' + str(e))

new_contents:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                if 'latestExecution' in action_state:
                    execution = action_state['latestExecution']

                    timezone = execution['lastStatusChange'].tzinfo
                    period_start = datetime.now(timezone) - timedelta(seconds=60)

                    if execution['lastStatusChange'] > period_start:
                        if execution['status'] == 'Failed':
                            post_notification(action_state)

        return '...Done'

    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()

subject: Fix CD fail lambda python
message: Fix CD fail lambda python
lang: Python
license: mit
repos: PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure
ndiff:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

-         period_start = datetime.now() - timedelta(seconds=60)
-
        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
+                 if 'latestExecution' in action_state:
-                 exec = action_state['latestExecution']
+                     execution = action_state['latestExecution']
+                     timezone = execution['lastStatusChange'].tzinfo
+                     period_start = datetime.now(timezone) - timedelta(seconds=60)
+
-                 if execution['lastStatusChange'] > period_start:
+                     if execution['lastStatusChange'] > period_start:
-                     if execution['status'] == 'Failed':
+                         if execution['status'] == 'Failed':
-                         post_notification(action_state)
+                             post_notification(action_state)

        return '...Done'

    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
-         put_job_failure(job, 'Function exception: ' + str(e))

instruction: Fix CD fail lambda python
content:
## Code Before:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        period_start = datetime.now() - timedelta(seconds=60)

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                exec = action_state['latestExecution']

                if execution['lastStatusChange'] > period_start:
                    if execution['status'] == 'Failed':
                        post_notification(action_state)

        return '...Done'

    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job, 'Function exception: ' + str(e))
## Instruction:
Fix CD fail lambda python
## Code After:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                if 'latestExecution' in action_state:
                    execution = action_state['latestExecution']

                    timezone = execution['lastStatusChange'].tzinfo
                    period_start = datetime.now(timezone) - timedelta(seconds=60)

                    if execution['lastStatusChange'] > period_start:
                        if execution['status'] == 'Failed':
                            post_notification(action_state)

        return '...Done'

    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()

fuzzy_diff:
# ... existing code ...
        pipeline_name = os.environ['PIPELINE_NAME']
# ... modified code ...
            for action_state in stage_state['actionStates']:
                if 'latestExecution' in action_state:
                    execution = action_state['latestExecution']

                    timezone = execution['lastStatusChange'].tzinfo
                    period_start = datetime.now(timezone) - timedelta(seconds=60)

                    if execution['lastStatusChange'] > period_start:
                        if execution['status'] == 'Failed':
                            post_notification(action_state)
...
        traceback.print_exc()
# ... rest of the code ...
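The timezone juggling in the fixed handler is the crux of this record: boto3 returns `lastStatusChange` as a timezone-aware `datetime`, and comparing that against a naive `datetime.now()` raises a `TypeError`. A standard-library sketch of the failure mode and the safe pattern the fix uses:

```python
from datetime import datetime, timezone

aware = datetime.now(timezone.utc)  # shaped like boto3's lastStatusChange
naive = datetime.now()

try:
    naive > aware
except TypeError as err:
    print(err)  # can't compare offset-naive and offset-aware datetimes

# deriving "now" from the value's own tzinfo, as the fix does, is safe
period_start = datetime.now(aware.tzinfo)
print(aware <= period_start)
```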
commit: d3f33af2fa7d4e7bf9969752e696aaf8120642bc
old_file: panoptes/environment/weather_station.py
new_file: panoptes/environment/weather_station.py
old_contents:
import datetime
import zmq

from . import monitor
from panoptes.utils import logger, config, messaging, threads


@logger.has_logger
@config.has_config
class WeatherStation(monitor.EnvironmentalMonitor):

    """
    This object is used to determine the weather safe/unsafe condition. It
    inherits from the monitor.EnvironmentalMonitor base class. It listens on
    the 'weather' channel of the messaging system.

    Config:
        weather_station.port (int): Port to publish to. Defaults to 6500
        weather_station.channel (str): the channel topic to publish on.
            Defaults to 'weather'

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for
            creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='WeatherStation')

        # Get the messaging information
        self.port = self.config.get('messaging').get('messaging_port', 6500)
        self.channel = self.config.get('messaging').get('channel', 'weather')

        # Create our Publishing socket
        self.socket = self.messaging.create_subscriber(port=self.port, channel=self.channel)

        if connect_on_startup:
            self.start_monitoring()

    def monitor(self):
        """
        Reads serial information off the attached weather station and publishes
        message with status
        """
        self.send_message('UNSAFE')

new_contents:
import datetime
import zmq

from . import monitor
from panoptes.utils import logger, config, messaging, threads


@logger.has_logger
@config.has_config
class WeatherStation(monitor.EnvironmentalMonitor):

    """
    This object is used to determine the weather safe/unsafe condition. It
    inherits from the monitor.EnvironmentalMonitor base class. It listens on
    the 'weather' channel of the messaging system.

    Config:
        weather_station.port (int): Port to publish to. Defaults to 6500
        weather_station.channel (str): the channel topic to publish on.
            Defaults to 'weather'

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for
            creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='WeatherStation')

        # Get the messaging information
        self.port = self.config.get('messaging').get('messaging_port', 6500)
        self.channel = self.config.get('messaging').get('channel', 'weather')

        # Create our Publishing socket
        self.socket = self.messaging.create_subscriber(port=self.port, channel=self.channel)

        if connect_on_startup:
            self.start_monitoring()

    def monitor(self):
        """
        Reads serial information off the attached weather station
        """
        msg = self.socket.recv_json()

subject: Set up weather station to receive updates
message: Set up weather station to receive updates
lang: Python
license: mit
repos: Guokr1991/POCS,fmin2958/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,fmin2958/POCS,Guokr1991/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,joshwalawender/POCS,Guokr1991/POCS,AstroHuntsman/POCS,fmin2958/POCS,joshwalawender/POCS,joshwalawender/POCS,Guokr1991/POCS
ndiff:
import datetime
import zmq

from . import monitor
from panoptes.utils import logger, config, messaging, threads


@logger.has_logger
@config.has_config
class WeatherStation(monitor.EnvironmentalMonitor):

    """
    This object is used to determine the weather safe/unsafe condition. It
    inherits from the monitor.EnvironmentalMonitor base class. It listens on
    the 'weather' channel of the messaging system.

    Config:
        weather_station.port (int): Port to publish to. Defaults to 6500
        weather_station.channel (str): the channel topic to publish on.
            Defaults to 'weather'

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for
            creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='WeatherStation')

        # Get the messaging information
        self.port = self.config.get('messaging').get('messaging_port', 6500)
        self.channel = self.config.get('messaging').get('channel', 'weather')

        # Create our Publishing socket
        self.socket = self.messaging.create_subscriber(port=self.port, channel=self.channel)

        if connect_on_startup:
            self.start_monitoring()

    def monitor(self):
        """
-         Reads serial information off the attached weather station and publishes
+         Reads serial information off the attached weather station
-         message with status
        """
-         self.send_message('UNSAFE')
+         msg = self.socket.recv_json()

instruction: Set up weather station to receive updates
content:
## Code Before:
import datetime
import zmq

from . import monitor
from panoptes.utils import logger, config, messaging, threads


@logger.has_logger
@config.has_config
class WeatherStation(monitor.EnvironmentalMonitor):

    """
    This object is used to determine the weather safe/unsafe condition. It
    inherits from the monitor.EnvironmentalMonitor base class. It listens on
    the 'weather' channel of the messaging system.

    Config:
        weather_station.port (int): Port to publish to. Defaults to 6500
        weather_station.channel (str): the channel topic to publish on.
            Defaults to 'weather'

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for
            creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='WeatherStation')

        # Get the messaging information
        self.port = self.config.get('messaging').get('messaging_port', 6500)
        self.channel = self.config.get('messaging').get('channel', 'weather')

        # Create our Publishing socket
        self.socket = self.messaging.create_subscriber(port=self.port, channel=self.channel)

        if connect_on_startup:
            self.start_monitoring()

    def monitor(self):
        """
        Reads serial information off the attached weather station and publishes
        message with status
        """
        self.send_message('UNSAFE')
## Instruction:
Set up weather station to receive updates
## Code After:
import datetime
import zmq

from . import monitor
from panoptes.utils import logger, config, messaging, threads


@logger.has_logger
@config.has_config
class WeatherStation(monitor.EnvironmentalMonitor):

    """
    This object is used to determine the weather safe/unsafe condition. It
    inherits from the monitor.EnvironmentalMonitor base class. It listens on
    the 'weather' channel of the messaging system.

    Config:
        weather_station.port (int): Port to publish to. Defaults to 6500
        weather_station.channel (str): the channel topic to publish on.
            Defaults to 'weather'

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for
            creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='WeatherStation')

        # Get the messaging information
        self.port = self.config.get('messaging').get('messaging_port', 6500)
        self.channel = self.config.get('messaging').get('channel', 'weather')

        # Create our Publishing socket
        self.socket = self.messaging.create_subscriber(port=self.port, channel=self.channel)

        if connect_on_startup:
            self.start_monitoring()

    def monitor(self):
        """
        Reads serial information off the attached weather station
        """
        msg = self.socket.recv_json()

fuzzy_diff:
# ... existing code ...
        """
        Reads serial information off the attached weather station
        """
        msg = self.socket.recv_json()
# ... rest of the code ...
commit: 091a08a8fe30c3cc00c6b85552e47a1b15b807b8
old_file: preferences/views.py
new_file: preferences/views.py
old_contents:
from django.shortcuts import render

# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail


class EmailRegistrationView(RegistrationView):
    form_class = RegistrationFormUniqueEmail

new_contents:
from django.shortcuts import render
from django.views.generic.edit import FormView

from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView

from preferences.forms import PreferencesForm


class EmailRegistrationView(RegistrationView):
    form_class = RegistrationFormUniqueEmail


class UserPreferences(FormView):
    template_name = 'preferences/preferences.html'
    form_class = PreferencesForm
    success_url = '/index/'

    def form_valid(self, form):
        return super(UserPreferences, self).form_valid(form)

subject: Add userprefs and email reg view
message: Add userprefs and email reg view
lang: Python
license: mit
repos: jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
ndiff:
from django.shortcuts import render
+ from django.views.generic.edit import FormView

- # Create your views here.
+ from registration.forms import RegistrationFormUniqueEmail
+ from registration.backends.default.views import RegistrationView
- from registration.views import RegistrationView
- from registration.forms import RegistrationFormUniqueEmail
+
+ from preferences.forms import PreferencesForm


class EmailRegistrationView(RegistrationView):
    form_class = RegistrationFormUniqueEmail
+
+
+ class UserPreferences(FormView):
+     template_name = 'preferences/preferences.html'
+     form_class = PreferencesForm
+     success_url = '/index/'
+
+     def form_valid(self, form):
+         return super(UserPreferences, self).form_valid(form)

instruction: Add userprefs and email reg view
content:
## Code Before:
from django.shortcuts import render

# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail


class EmailRegistrationView(RegistrationView):
    form_class = RegistrationFormUniqueEmail
## Instruction:
Add userprefs and email reg view
## Code After:
from django.shortcuts import render
from django.views.generic.edit import FormView

from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView

from preferences.forms import PreferencesForm


class EmailRegistrationView(RegistrationView):
    form_class = RegistrationFormUniqueEmail


class UserPreferences(FormView):
    template_name = 'preferences/preferences.html'
    form_class = PreferencesForm
    success_url = '/index/'

    def form_valid(self, form):
        return super(UserPreferences, self).form_valid(form)

fuzzy_diff:
...
from django.shortcuts import render
from django.views.generic.edit import FormView

from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView

from preferences.forms import PreferencesForm
...
    form_class = RegistrationFormUniqueEmail


class UserPreferences(FormView):
    template_name = 'preferences/preferences.html'
    form_class = PreferencesForm
    success_url = '/index/'

    def form_valid(self, form):
        return super(UserPreferences, self).form_valid(form)
...
commit: 2864441be365beb40e0396b444f8d96af8d7d92e
old_file: aleph/logic/documents.py
new_file: aleph/logic/documents.py
old_contents:
import os
import logging

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    ingest_entity(collection, document.to_proxy())
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document)

new_contents:
import os
import logging

from servicelayer.jobs import Job

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None, job_id=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    job_id = job_id or Job.random_id()
    ingest_entity(collection, document.to_proxy(), job_id=job_id)
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document, job_id)

subject: Make stable job IDs in ingest runs
message: Make stable job IDs in ingest runs
lang: Python
license: mit
repos: alephdata/aleph,pudo/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph
ndiff:
import os
import logging

+ from servicelayer.jobs import Job
from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


- def crawl_directory(collection, path, parent=None):
+ def crawl_directory(collection, path, parent=None, job_id=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
+     job_id = job_id or Job.random_id()
-     ingest_entity(collection, document.to_proxy())
+     ingest_entity(collection, document.to_proxy(), job_id=job_id)
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
-             crawl_directory(collection, child, document)
+             crawl_directory(collection, child, document, job_id)

instruction: Make stable job IDs in ingest runs
content:
## Code Before:
import os
import logging

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    ingest_entity(collection, document.to_proxy())
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document)
## Instruction:
Make stable job IDs in ingest runs
## Code After:
import os
import logging

from servicelayer.jobs import Job

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None, job_id=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    job_id = job_id or Job.random_id()
    ingest_entity(collection, document.to_proxy(), job_id=job_id)
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document, job_id)

fuzzy_diff:
# ... existing code ...
import logging

from servicelayer.jobs import Job
# ... modified code ...
def crawl_directory(collection, path, parent=None, job_id=None):
    """Crawl the contents of the given path."""
...
    db.session.commit()
    job_id = job_id or Job.random_id()
    ingest_entity(collection, document.to_proxy(), job_id=job_id)
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
...
        for child in path.iterdir():
            crawl_directory(collection, child, document, job_id)
# ... rest of the code ...
commit: 19656decb756db364d012cbfb13d0ddf30e15bae
old_file: py/tests/test_runner.py
new_file: py/tests/test_runner.py
old_contents:
import os
import sys
import pytest

dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
    path = dist
else:
    path = "{}:{}".format(dist, path)

os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)

pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)

new_contents:
import os
import sys
import pytest

dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
    path = dist
else:
    path = "{}:{}".format(dist, path)

os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)

os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)

pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)

subject: Fix running python tests by changing the env directly
message: [SW-1610][FollowUp] Fix running python tests by changing the env directly

(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)
lang: Python
license: apache-2.0
repos: h2oai/sparkling-water,h2oai/sparkling-water,h2oai/sparkling-water,h2oai/sparkling-water
ndiff:
import os
import sys
import pytest

dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
    path = dist
else:
    path = "{}:{}".format(dist, path)

+ os.putenv("PYTHONPATH", path)
+ os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
+ os.putenv('PYSPARK_PYTHON', sys.executable)
+
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)

pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)

instruction: Fix running python tests by changing the env directly
content:
## Code Before:
import os
import sys
import pytest

dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
    path = dist
else:
    path = "{}:{}".format(dist, path)

os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)

pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
## Instruction:
Fix running python tests by changing the env directly
## Code After:
import os
import sys
import pytest

dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
    path = dist
else:
    path = "{}:{}".format(dist, path)

os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)

os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)

pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)

fuzzy_diff:
...
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)

os.environ["PYTHONPATH"] = path
...
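The reason the direct `os.environ` assignments are the effective half of this fix is a CPython detail: writing to `os.environ` updates the in-process mapping *and* calls `putenv()` for child processes, whereas a bare `os.putenv()` only changes the process environment and leaves `os.environ`/`os.getenv()` stale. A small demonstration:

```python
import os

os.putenv("DEMO_A", "1")         # visible to child processes only
print(os.environ.get("DEMO_A"))  # None -- the mapping was not updated

os.environ["DEMO_B"] = "2"       # updates the mapping and calls putenv()
print(os.environ.get("DEMO_B"))  # 2
```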
commit: 9ea4164f739b06752719ad4e68f5af85b18f9f1c
old_file: tests/scripts/constants.py
new_file: tests/scripts/constants.py
old_contents:
from __future__ import print_function

# Imports
import os

############################################
# CONSTANTS
############################################

DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False

DEFAULT_FAMILIES = ["autoparallel",
                    "c",
                    "cloud",
                    "java",
                    "performance",
                    "pscos",
                    "python",
                    "tools",
                    "fault_tolerance"]
DEFAULT_TESTS = []

DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")

RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"

new_contents:
from __future__ import print_function

# Imports
import os

############################################
# CONSTANTS
############################################

DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False

DEFAULT_FAMILIES = [
                    "agents",
                    "autoparallel",
                    "c",
                    "cloud",
                    "java",
                    "performance",
                    "pscos",
                    "python",
                    "tools",
                    "fault_tolerance"]
DEFAULT_TESTS = []

DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")

RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"

subject: Include agents as a default test family
message: Include agents as a default test family
lang: Python
license: apache-2.0
repos: mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs
ndiff:
from __future__ import print_function

# Imports
import os

############################################
# CONSTANTS
############################################

DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False

- DEFAULT_FAMILIES = ["autoparallel",
+ DEFAULT_FAMILIES = [
+                     "agents",
+                     "autoparallel",
                    "c",
                    "cloud",
                    "java",
                    "performance",
                    "pscos",
                    "python",
                    "tools",
                    "fault_tolerance"]
DEFAULT_TESTS = []

DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")

RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"

instruction: Include agents as a default test family
content:
## Code Before:
from __future__ import print_function

# Imports
import os

############################################
# CONSTANTS
############################################

DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False

DEFAULT_FAMILIES = ["autoparallel",
                    "c",
                    "cloud",
                    "java",
                    "performance",
                    "pscos",
                    "python",
                    "tools",
                    "fault_tolerance"]
DEFAULT_TESTS = []

DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")

RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
## Instruction:
Include agents as a default test family
## Code After:
from __future__ import print_function

# Imports
import os

############################################
# CONSTANTS
############################################

DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False

DEFAULT_FAMILIES = [
                    "agents",
                    "autoparallel",
                    "c",
                    "cloud",
                    "java",
                    "performance",
                    "pscos",
                    "python",
                    "tools",
                    "fault_tolerance"]
DEFAULT_TESTS = []

DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")

RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"

fuzzy_diff:
// ... existing code ...
DEFAULT_FAIL_FAST = False

DEFAULT_FAMILIES = [
                    "agents",
                    "autoparallel",
                    "c",
// ... rest of the code ...
109085a9f5f6eded6ea2afe1f6aabaf183980d7c
scripts/jenkins/cloud/ansible/roles/heat-generator/files/dns-update.py
scripts/jenkins/cloud/ansible/roles/heat-generator/files/dns-update.py
import argparse import yaml def parse_commandline(): parser = argparse.ArgumentParser() parser.add_argument( "--dns-servers", metavar="NAME", help="A list of nameservers", nargs="+", default=[]) parser.add_argument( "--ntp-servers", metavar="NAME", help="A list of ntp servers", nargs="+", default=[]) return parser.parse_args() if __name__ == "__main__": options = parse_commandline() print(options) with open('cloudConfig.yml') as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) data['cloud']['dns-settings'] = dict(nameservers=options.dns_servers) data['cloud']['ntp-servers'] = options.ntp_servers with open('cloudConfig.yml', 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False))
import argparse import yaml def parse_commandline(): parser = argparse.ArgumentParser() parser.add_argument( "--dns-servers", metavar="NAME", help="A list of nameservers", nargs="+", default=[]) parser.add_argument( "--ntp-servers", metavar="NAME", help="A list of ntp servers", nargs="+", default=[]) parser.add_argument( "--cloud-config", metavar="FILE", help="The cloudConfig.yml FILE", default="cloudConfig.yml") return parser.parse_args() if __name__ == "__main__": options = parse_commandline() print(options) with open(options.cloud_config) as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) data['cloud']['dns-settings'] = dict(nameservers=options.dns_servers) data['cloud']['ntp-servers'] = options.ntp_servers with open(options.cloud_config, 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False))
Add option for name and path of cloudConfig.yml file
Add option for name and path of cloudConfig.yml file This change adds a command line option to the dns script to specify the name and location of the `cloudConfig.yml` file. Signed-off-by: Nicolas Bock <[email protected]>
Python
apache-2.0
aspiers/automation,SUSE-Cloud/automation,gosipyan/automation,gosipyan/automation,gosipyan/automation,gosipyan/automation,SUSE-Cloud/automation,aspiers/automation,aspiers/automation,aspiers/automation,SUSE-Cloud/automation,SUSE-Cloud/automation
import argparse import yaml def parse_commandline(): parser = argparse.ArgumentParser() parser.add_argument( "--dns-servers", metavar="NAME", help="A list of nameservers", nargs="+", default=[]) parser.add_argument( "--ntp-servers", metavar="NAME", help="A list of ntp servers", nargs="+", default=[]) + parser.add_argument( + "--cloud-config", + metavar="FILE", + help="The cloudConfig.yml FILE", + default="cloudConfig.yml") return parser.parse_args() if __name__ == "__main__": options = parse_commandline() print(options) - with open('cloudConfig.yml') as f: + with open(options.cloud_config) as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) data['cloud']['dns-settings'] = dict(nameservers=options.dns_servers) data['cloud']['ntp-servers'] = options.ntp_servers - with open('cloudConfig.yml', 'w') as f: + with open(options.cloud_config, 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False))
Add option for name and path of cloudConfig.yml file
## Code Before: import argparse import yaml def parse_commandline(): parser = argparse.ArgumentParser() parser.add_argument( "--dns-servers", metavar="NAME", help="A list of nameservers", nargs="+", default=[]) parser.add_argument( "--ntp-servers", metavar="NAME", help="A list of ntp servers", nargs="+", default=[]) return parser.parse_args() if __name__ == "__main__": options = parse_commandline() print(options) with open('cloudConfig.yml') as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) data['cloud']['dns-settings'] = dict(nameservers=options.dns_servers) data['cloud']['ntp-servers'] = options.ntp_servers with open('cloudConfig.yml', 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False)) ## Instruction: Add option for name and path of cloudConfig.yml file ## Code After: import argparse import yaml def parse_commandline(): parser = argparse.ArgumentParser() parser.add_argument( "--dns-servers", metavar="NAME", help="A list of nameservers", nargs="+", default=[]) parser.add_argument( "--ntp-servers", metavar="NAME", help="A list of ntp servers", nargs="+", default=[]) parser.add_argument( "--cloud-config", metavar="FILE", help="The cloudConfig.yml FILE", default="cloudConfig.yml") return parser.parse_args() if __name__ == "__main__": options = parse_commandline() print(options) with open(options.cloud_config) as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) data['cloud']['dns-settings'] = dict(nameservers=options.dns_servers) data['cloud']['ntp-servers'] = options.ntp_servers with open(options.cloud_config, 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False))
// ... existing code ... default=[]) parser.add_argument( "--cloud-config", metavar="FILE", help="The cloudConfig.yml FILE", default="cloudConfig.yml") return parser.parse_args() // ... modified code ... with open(options.cloud_config) as f: data = yaml.load(f.read(), Loader=yaml.SafeLoader) ... with open(options.cloud_config, 'w') as f: f.write(yaml.safe_dump(data, default_flow_style=False)) // ... rest of the code ...
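A hypothetical way to exercise the new flag from Python (the path is illustrative; note that argparse turns `--cloud-config` into the attribute `cloud_config`):

```python
import sys

# simulate a command-line invocation of the script
sys.argv = ["dns-update.py",
            "--dns-servers", "10.0.0.2", "10.0.0.3",
            "--cloud-config", "/tmp/cloudConfig.yml"]
options = parse_commandline()
print(options.cloud_config)  # /tmp/cloudConfig.yml
```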
commit: 492004049da87744cd96a6e6afeb9a6239a8ac44
old_file: ocradmin/lib/nodetree/registry.py
new_file: ocradmin/lib/nodetree/registry.py
old_contents:
class NotRegistered(KeyError):
    pass


__all__ = ["NodeRegistry", "nodes"]


class NodeRegistry(dict):
    NotRegistered = NotRegistered

    def register(self, node):
        """Register a node in the node registry.

        The node will be automatically instantiated if not already an
        instance.

        """
        self[node.name] = inspect.isclass(node) and node() or node

    def unregister(self, name):
        """Unregister node by name."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

    def filter_types(self, type):
        """Return all nodes of a specific type."""
        return dict((name, node) for name, node in self.iteritems()
                    if node.type == type)

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()

new_contents:
import inspect


class NotRegistered(KeyError):
    pass


class NodeRegistry(dict):
    NotRegistered = NotRegistered

    def register(self, node):
        """Register a node class in the node registry."""
        self[node.name] = inspect.isclass(node) and node or node.__class__

    def unregister(self, name):
        """Unregister node by name."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

    def get_by_attr(self, attr, value=None):
        """Return all nodes of a specific type that have a matching attr.

        If `value` is given, only return nodes where the attr value matches."""
        ret = {}
        for name, node in self.iteritems():
            if hasattr(node, attr) and value is None\
                    or hasattr(node, name) and getattr(node, name) == value:
                ret[name] = node
        return ret

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()

subject: Fix missing import. Add method to get all nodes with a particular attribute
message: Fix missing import. Add method to get all nodes with a particular attribute
lang: Python
license: apache-2.0
repos: vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
ndiff:
+
+ import inspect
class NotRegistered(KeyError):
    pass
-
-
- __all__ = ["NodeRegistry", "nodes"]


class NodeRegistry(dict):
    NotRegistered = NotRegistered

    def register(self, node):
-         """Register a node in the node registry.
+         """Register a node class in the node registry."""
-
-         The node will be automatically instantiated if not already an
-         instance.
-
-         """
-         self[node.name] = inspect.isclass(node) and node() or node
+         self[node.name] = inspect.isclass(node) and node or node.__class__

    def unregister(self, name):
        """Unregister node by name."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

-     def filter_types(self, type):
+     def get_by_attr(self, attr, value=None):
-         """Return all nodes of a specific type."""
+         """Return all nodes of a specific type that have a matching attr.
-         return dict((name, node) for name, node in self.iteritems()
-                     if node.type == type)
+         If `value` is given, only return nodes where the attr value matches."""
+         ret = {}
+         for name, node in self.iteritems():
+             if hasattr(node, attr) and value is None\
+                     or hasattr(node, name) and getattr(node, name) == value:
+                 ret[name] = node
+         return ret

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()

instruction: Fix missing import. Add method to get all nodes with a particular attribute
content:
## Code Before:
class NotRegistered(KeyError):
    pass


__all__ = ["NodeRegistry", "nodes"]


class NodeRegistry(dict):
    NotRegistered = NotRegistered

    def register(self, node):
        """Register a node in the node registry.

        The node will be automatically instantiated if not already an
        instance.

        """
        self[node.name] = inspect.isclass(node) and node() or node

    def unregister(self, name):
        """Unregister node by name."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

    def filter_types(self, type):
        """Return all nodes of a specific type."""
        return dict((name, node) for name, node in self.iteritems()
                    if node.type == type)

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()
## Instruction:
Fix missing import. Add method to get all nodes with a particular attribute
## Code After:
import inspect


class NotRegistered(KeyError):
    pass


class NodeRegistry(dict):
    NotRegistered = NotRegistered

    def register(self, node):
        """Register a node class in the node registry."""
        self[node.name] = inspect.isclass(node) and node or node.__class__

    def unregister(self, name):
        """Unregister node by name."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

    def get_by_attr(self, attr, value=None):
        """Return all nodes of a specific type that have a matching attr.

        If `value` is given, only return nodes where the attr value matches."""
        ret = {}
        for name, node in self.iteritems():
            if hasattr(node, attr) and value is None\
                    or hasattr(node, name) and getattr(node, name) == value:
                ret[name] = node
        return ret

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()

fuzzy_diff:
# ... existing code ...
import inspect
# ... modified code ...
    pass
...
    def register(self, node):
        """Register a node class in the node registry."""
        self[node.name] = inspect.isclass(node) and node or node.__class__
...
    def get_by_attr(self, attr, value=None):
        """Return all nodes of a specific type that have a matching attr.

        If `value` is given, only return nodes where the attr value matches."""
        ret = {}
        for name, node in self.iteritems():
            if hasattr(node, attr) and value is None\
                    or hasattr(node, name) and getattr(node, name) == value:
                ret[name] = node
        return ret
# ... rest of the code ...
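One caveat about the recorded `get_by_attr`: since `and` binds tighter than `or`, its condition parses as `(hasattr(node, attr) and value is None) or (hasattr(node, name) and getattr(node, name) == value)`, and the second arm tests the registry key `name` rather than `attr`. If the intent was "node has the attribute, and either no value was requested or the value matches", a corrected sketch (not part of the recorded commit) could read:

```python
def get_by_attr(self, attr, value=None):
    """Return all registered nodes that have `attr`.

    If `value` is given, only return nodes whose attr value matches."""
    ret = {}
    for name, node in self.iteritems():
        # require the attribute, then optionally filter on its value
        if hasattr(node, attr) and (value is None
                                    or getattr(node, attr) == value):
            ret[name] = node
    return ret
```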
commit: 30dd3c8436ebe69aff2956322312072e8ab581f0
old_file: example/tests/test_fields.py
new_file: example/tests/test_fields.py
old_contents:
from __future__ import unicode_literals

from django.test import TestCase

from shop.models.cart import BaseCartItem


class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}

        obj = BaseCartItem.objects.create(extra=json_obj)
        new_obj = BaseCartItem.objects.get(id=obj.id)

        self.assertEqual(new_obj.json, json_obj)

new_contents:
from __future__ import unicode_literals

from django.test import TestCase

from shop.models.defaults.customer import Customer


class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}

        obj = Customer.objects.create(extra=json_obj)
        new_obj = Customer.objects.get(id=obj.id)

        self.assertEqual(new_obj.extra, json_obj)

subject: Update model used to test
message: Update model used to test
lang: Python
license: bsd-3-clause
repos: jrief/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,divio/django-shop,khchine5/django-shop,nimbis/django-shop,jrief/django-shop,khchine5/django-shop,nimbis/django-shop,divio/django-shop,jrief/django-shop,nimbis/django-shop,nimbis/django-shop,awesto/django-shop,awesto/django-shop,jrief/django-shop,divio/django-shop
ndiff:
from __future__ import unicode_literals

from django.test import TestCase

- from shop.models.cart import BaseCartItem
+ from shop.models.defaults.customer import Customer


class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}

-         obj = BaseCartItem.objects.create(extra=json_obj)
+         obj = Customer.objects.create(extra=json_obj)
-         new_obj = BaseCartItem.objects.get(id=obj.id)
+         new_obj = Customer.objects.get(id=obj.id)

-         self.assertEqual(new_obj.json, json_obj)
+         self.assertEqual(new_obj.extra, json_obj)

instruction: Update model used to test
content:
## Code Before:
from __future__ import unicode_literals

from django.test import TestCase

from shop.models.cart import BaseCartItem


class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}

        obj = BaseCartItem.objects.create(extra=json_obj)
        new_obj = BaseCartItem.objects.get(id=obj.id)

        self.assertEqual(new_obj.json, json_obj)
## Instruction:
Update model used to test
## Code After:
from __future__ import unicode_literals

from django.test import TestCase

from shop.models.defaults.customer import Customer


class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}

        obj = Customer.objects.create(extra=json_obj)
        new_obj = Customer.objects.get(id=obj.id)

        self.assertEqual(new_obj.extra, json_obj)

fuzzy_diff:
# ... existing code ...
from shop.models.defaults.customer import Customer
# ... modified code ...
        obj = Customer.objects.create(extra=json_obj)
        new_obj = Customer.objects.get(id=obj.id)

        self.assertEqual(new_obj.extra, json_obj)
# ... rest of the code ...
316d9557002c54c5dd03f2a740367946b997d06a
src/foremast/utils/generate_encoded_user_data.py
src/foremast/utils/generate_encoded_user_data.py
"""Generate base64 encoded User Data.""" import base64 from ..utils import get_template def generate_encoded_user_data(env='dev', region='us-east-1', app_name='', group_name=''): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. app_name (str): Application name, e.g. coreforrest. group_name (str): Application group nane, e.g. core. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ user_data = get_template(template_file='user_data.sh.j2', env=env, region=region, app_name=app_name, group_name=group_name, ) return base64.b64encode(user_data.encode()).decode()
"""Generate base64 encoded User Data.""" import base64 from .get_template import get_template def generate_encoded_user_data(env='dev', region='us-east-1', app_name='', group_name=''): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. app_name (str): Application name, e.g. coreforrest. group_name (str): Application group nane, e.g. core. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ user_data = get_template(template_file='user_data.sh.j2', env=env, region=region, app_name=app_name, group_name=group_name, ) return base64.b64encode(user_data.encode()).decode()
Use new relative import within directory
fix: Use new relative import within directory See also: PSOBAT-1197
Python
apache-2.0
gogoair/foremast,gogoair/foremast
"""Generate base64 encoded User Data.""" import base64 - from ..utils import get_template + from .get_template import get_template def generate_encoded_user_data(env='dev', region='us-east-1', app_name='', group_name=''): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. app_name (str): Application name, e.g. coreforrest. group_name (str): Application group nane, e.g. core. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ user_data = get_template(template_file='user_data.sh.j2', env=env, region=region, app_name=app_name, group_name=group_name, ) return base64.b64encode(user_data.encode()).decode()
Use new relative import within directory
## Code Before: """Generate base64 encoded User Data.""" import base64 from ..utils import get_template def generate_encoded_user_data(env='dev', region='us-east-1', app_name='', group_name=''): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. app_name (str): Application name, e.g. coreforrest. group_name (str): Application group nane, e.g. core. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ user_data = get_template(template_file='user_data.sh.j2', env=env, region=region, app_name=app_name, group_name=group_name, ) return base64.b64encode(user_data.encode()).decode() ## Instruction: Use new relative import within directory ## Code After: """Generate base64 encoded User Data.""" import base64 from .get_template import get_template def generate_encoded_user_data(env='dev', region='us-east-1', app_name='', group_name=''): r"""Generate base64 encoded User Data. Args: env (str): Deployment environment, e.g. dev, stage. region (str): AWS Region, e.g. us-east-1. app_name (str): Application name, e.g. coreforrest. group_name (str): Application group nane, e.g. core. Returns: str: base64 encoded User Data script. #!/bin/bash export CLOUD_ENVIRONMENT=dev export CLOUD_APP=coreforrest export CLOUD_APP_GROUP=forrest export CLOUD_STACK=forrest export EC2_REGION=us-east-1 export CLOUD_DOMAIN=dev.example.com printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env """ user_data = get_template(template_file='user_data.sh.j2', env=env, region=region, app_name=app_name, group_name=group_name, ) return base64.b64encode(user_data.encode()).decode()
# ... existing code ... from .get_template import get_template # ... rest of the code ...
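A short usage sketch for this record's function (it assumes the `user_data.sh.j2` template is resolvable by `get_template`; the decoded prefix shown is illustrative):

```python
import base64

encoded = generate_encoded_user_data(env='dev',
                                     region='us-east-1',
                                     app_name='coreforrest',
                                     group_name='core')

# the return value is base64 text ready for an AWS launch configuration
print(base64.b64decode(encoded).decode().splitlines()[0])  # e.g. #!/bin/bash
```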
26833c5d41bb3611aa61655c28da4d40b173712e
Orange/tests/test_preprocess.py
Orange/tests/test_preprocess.py
import unittest from mock import Mock, MagicMock, patch import Orange class TestPreprocess(unittest.TestCase): def test_read_data_calls_reader(self): class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): __init__ = Mock() __call__ = Mock() @classmethod def reset(cls): cls.__init__.reset_mock() cls.__call__.reset_mock() return MockPreprocessor table = Mock(Orange.data.Table) MockPreprocessor(table, 1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3}) MockPreprocessor.__call__.assert_called_with(table) MockPreprocessor.reset() MockPreprocessor = create_mock() MockPreprocessor(1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) self.assertEqual(MockPreprocessor.__call__.call_count(), 0) MockPreprocessor = create_mock() MockPreprocessor(a=3) MockPreprocessor.__init__.assert_called_with(a=3) self.assertEqual(MockPreprocessor.__call__.call_count(), 0) MockPreprocessor = create_mock() MockPreprocessor() MockPreprocessor.__init__.assert_called_with() self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
import unittest from mock import Mock, MagicMock, patch import Orange class TestPreprocess(unittest.TestCase): def test_read_data_calls_reader(self): class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): __init__ = Mock(return_value=None) __call__ = Mock() @classmethod def reset(cls): cls.__init__.reset_mock() cls.__call__.reset_mock() table = Mock(Orange.data.Table) MockPreprocessor(table, 1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) MockPreprocessor.__call__.assert_called_with(table) MockPreprocessor.reset() MockPreprocessor(1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor(a=3) MockPreprocessor.__init__.assert_called_with(a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor() MockPreprocessor.__init__.assert_called_with() self.assertEqual(MockPreprocessor.__call__.call_count, 0)
Fix tests for Preprocess constructors
Fix tests for Preprocess constructors
Python
bsd-2-clause
qusp/orange3,qusp/orange3,qPCR4vir/orange3,qusp/orange3,qPCR4vir/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,qusp/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3
import unittest from mock import Mock, MagicMock, patch import Orange class TestPreprocess(unittest.TestCase): def test_read_data_calls_reader(self): class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): - __init__ = Mock() + __init__ = Mock(return_value=None) __call__ = Mock() @classmethod def reset(cls): cls.__init__.reset_mock() cls.__call__.reset_mock() - return MockPreprocessor table = Mock(Orange.data.Table) MockPreprocessor(table, 1, 2, a=3) - MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3}) + MockPreprocessor.__init__.assert_called_with(1, 2, a=3) MockPreprocessor.__call__.assert_called_with(table) MockPreprocessor.reset() - MockPreprocessor = create_mock() MockPreprocessor(1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) - self.assertEqual(MockPreprocessor.__call__.call_count(), 0) + self.assertEqual(MockPreprocessor.__call__.call_count, 0) - MockPreprocessor = create_mock() MockPreprocessor(a=3) MockPreprocessor.__init__.assert_called_with(a=3) - self.assertEqual(MockPreprocessor.__call__.call_count(), 0) + self.assertEqual(MockPreprocessor.__call__.call_count, 0) - MockPreprocessor = create_mock() MockPreprocessor() MockPreprocessor.__init__.assert_called_with() - self.assertEqual(MockPreprocessor.__call__.call_count(), 0) + self.assertEqual(MockPreprocessor.__call__.call_count, 0)
Fix tests for Preprocess constructors
## Code Before: import unittest from mock import Mock, MagicMock, patch import Orange class TestPreprocess(unittest.TestCase): def test_read_data_calls_reader(self): class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): __init__ = Mock() __call__ = Mock() @classmethod def reset(cls): cls.__init__.reset_mock() cls.__call__.reset_mock() return MockPreprocessor table = Mock(Orange.data.Table) MockPreprocessor(table, 1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3}) MockPreprocessor.__call__.assert_called_with(table) MockPreprocessor.reset() MockPreprocessor = create_mock() MockPreprocessor(1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) self.assertEqual(MockPreprocessor.__call__.call_count(), 0) MockPreprocessor = create_mock() MockPreprocessor(a=3) MockPreprocessor.__init__.assert_called_with(a=3) self.assertEqual(MockPreprocessor.__call__.call_count(), 0) MockPreprocessor = create_mock() MockPreprocessor() MockPreprocessor.__init__.assert_called_with() self.assertEqual(MockPreprocessor.__call__.call_count(), 0) ## Instruction: Fix tests for Preprocess constructors ## Code After: import unittest from mock import Mock, MagicMock, patch import Orange class TestPreprocess(unittest.TestCase): def test_read_data_calls_reader(self): class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): __init__ = Mock(return_value=None) __call__ = Mock() @classmethod def reset(cls): cls.__init__.reset_mock() cls.__call__.reset_mock() table = Mock(Orange.data.Table) MockPreprocessor(table, 1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) MockPreprocessor.__call__.assert_called_with(table) MockPreprocessor.reset() MockPreprocessor(1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor(a=3) MockPreprocessor.__init__.assert_called_with(a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor() MockPreprocessor.__init__.assert_called_with() self.assertEqual(MockPreprocessor.__call__.call_count, 0)
# ... existing code ... class MockPreprocessor(Orange.preprocess.preprocess.Preprocess): __init__ = Mock(return_value=None) __call__ = Mock() # ... modified code ... cls.__call__.reset_mock() ... MockPreprocessor(table, 1, 2, a=3) MockPreprocessor.__init__.assert_called_with(1, 2, a=3) MockPreprocessor.__call__.assert_called_with(table) ... MockPreprocessor(1, 2, a=3) ... MockPreprocessor.__init__.assert_called_with(1, 2, a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor(a=3) ... MockPreprocessor.__init__.assert_called_with(a=3) self.assertEqual(MockPreprocessor.__call__.call_count, 0) MockPreprocessor() ... MockPreprocessor.__init__.assert_called_with() self.assertEqual(MockPreprocessor.__call__.call_count, 0) # ... rest of the code ...
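The fix above hinges on a CPython rule worth seeing in isolation: `type.__call__` requires `__init__` to return `None`, so a bare `Mock()` (whose call returns a new `Mock`) breaks instantiation, while `Mock(return_value=None)` works. This stand-alone sketch (the classes `Broken` and `Fixed` are illustrative) also shows that `call_count` is an attribute, not a method — the other correction in this commit:

```python
from unittest.mock import Mock

class Broken:
    __init__ = Mock()                  # the mocked __init__ returns a Mock, not None

class Fixed:
    __init__ = Mock(return_value=None)

try:
    Broken()
except TypeError as exc:
    print(exc)                         # __init__() should return None, not 'Mock'

obj = Fixed()                          # fine: the mocked __init__ returns None
assert Fixed.__init__.call_count == 1  # attribute access, no parentheses
```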
ca758b2813ae77b795c4318d7d5566cd47ab0ec7
postgres/operations.py
postgres/operations.py
from django.db.migrations.operations.base import Operation from django.db import connection from psycopg2.extras import register_composite class LoadSQLFromScript(Operation): def __init__(self, filename): self.filename = filename @property def reversible(self): return False def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute(open(self.filename).read().replace('%', '%%')) class CreateCompositeType(Operation): def __init__(self, name=None, fields=None): self.name = name self.fields = fields @property def reversible(self): return True def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('CREATE TYPE %s AS (%s)' % ( self.name, ", ".join(["%s %s" % field for field in self.fields]) )) def state_backwards(self, app_label, state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('DROP TYPE %s' % self.name)
from django.db.migrations.operations.base import Operation from django.db import connection from psycopg2.extras import register_composite from .fields.composite import composite_type_created class LoadSQLFromScript(Operation): def __init__(self, filename): self.filename = filename @property def reversible(self): return False def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute(open(self.filename).read().replace('%', '%%')) class CreateCompositeType(Operation): def __init__(self, name=None, fields=None): self.name = name self.fields = fields @property def reversible(self): return True def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('CREATE TYPE %s AS (%s)' % ( self.name, ", ".join(["%s %s" % field for field in self.fields]) )) composite_type_created.send(sender=self.__class__, db_type=self.name) def state_backwards(self, app_label, state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('DROP TYPE %s' % self.name)
Send a signal after creation of composite field.
Send a signal after creation of composite field.
Python
bsd-3-clause
wlanslovenija/django-postgres
from django.db.migrations.operations.base import Operation from django.db import connection from psycopg2.extras import register_composite + from .fields.composite import composite_type_created + class LoadSQLFromScript(Operation): def __init__(self, filename): self.filename = filename @property def reversible(self): return False def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute(open(self.filename).read().replace('%', '%%')) class CreateCompositeType(Operation): def __init__(self, name=None, fields=None): self.name = name self.fields = fields @property def reversible(self): return True def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('CREATE TYPE %s AS (%s)' % ( self.name, ", ".join(["%s %s" % field for field in self.fields]) )) + composite_type_created.send(sender=self.__class__, db_type=self.name) def state_backwards(self, app_label, state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('DROP TYPE %s' % self.name)
Send a signal after creation of composite field.
## Code Before: from django.db.migrations.operations.base import Operation from django.db import connection from psycopg2.extras import register_composite class LoadSQLFromScript(Operation): def __init__(self, filename): self.filename = filename @property def reversible(self): return False def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute(open(self.filename).read().replace('%', '%%')) class CreateCompositeType(Operation): def __init__(self, name=None, fields=None): self.name = name self.fields = fields @property def reversible(self): return True def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('CREATE TYPE %s AS (%s)' % ( self.name, ", ".join(["%s %s" % field for field in self.fields]) )) def state_backwards(self, app_label, state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('DROP TYPE %s' % self.name) ## Instruction: Send a signal after creation of composite field. ## Code After: from django.db.migrations.operations.base import Operation from django.db import connection from psycopg2.extras import register_composite from .fields.composite import composite_type_created class LoadSQLFromScript(Operation): def __init__(self, filename): self.filename = filename @property def reversible(self): return False def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute(open(self.filename).read().replace('%', '%%')) class CreateCompositeType(Operation): def __init__(self, name=None, fields=None): self.name = name self.fields = fields @property def reversible(self): return True def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('CREATE TYPE %s AS (%s)' % ( self.name, ", ".join(["%s %s" % field for field in self.fields]) )) composite_type_created.send(sender=self.__class__, db_type=self.name) def state_backwards(self, app_label, state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): schema_editor.execute('DROP TYPE %s' % self.name)
... from psycopg2.extras import register_composite from .fields.composite import composite_type_created ... )) composite_type_created.send(sender=self.__class__, db_type=self.name) ...
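A hedged sketch of the dispatch pattern the operation now uses. The real `composite_type_created` lives in `.fields.composite`; here a stand-in `django.dispatch.Signal` is defined locally, and the receiver body is illustrative:

```python
from django.dispatch import Signal, receiver

composite_type_created = Signal()      # stand-in for the imported signal

@receiver(composite_type_created)
def on_composite_created(sender, db_type, **kwargs):
    # A real receiver might call psycopg2.extras.register_composite here.
    print("composite type created:", db_type)

# What database_forwards does once CREATE TYPE has succeeded:
composite_type_created.send(sender=object, db_type="my_composite")
```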
00229b2ced2f042cdcbb24bfaac4d33051930b86
source/bark/logger.py
source/bark/logger.py
import copy import bark from .log import Log class Logger(Log): '''Helper for emitting logs. A logger can be used to preset common information (such as a name) and then emit :py:class:`~bark.log.Log` records with that information already present. ''' def __init__(self, name, **kw): '''Initialise logger with identifying *name*.''' kw['name'] = name super(Logger, self).__init__(**kw) def log(self, message, **kw): '''Emit a :py:class:`~bark.log.Log` record. A copy of this logger's information is made and then merged with the passed in *kw* arguments before being emitted. ''' log = copy.deepcopy(self) log.update(**kw) log['message'] = message # Call global handle method. bark.handle(log)
import copy import bark from .log import Log class Logger(Log): '''Helper for emitting logs. A logger can be used to preset common information (such as a name) and then emit :py:class:`~bark.log.Log` records with that information already present. ''' def __init__(self, name, _handle=bark.handle, **kw): '''Initialise logger with identifying *name*. If you need to override the default handle then pass in a custom *_handle* ''' kw['name'] = name super(Logger, self).__init__(**kw) self._handle = _handle def log(self, message, **kw): '''Emit a :py:class:`~bark.log.Log` record. A copy of this logger's information is made and then merged with the passed in *kw* arguments before being emitted. ''' log = copy.deepcopy(self) log.update(**kw) log['message'] = message self._handle(log)
Allow handle to be passed in to avoid embedded global reference.
Allow handle to be passed in to avoid embedded global reference.
Python
apache-2.0
4degrees/mill,4degrees/sawmill
import copy import bark from .log import Log class Logger(Log): '''Helper for emitting logs. A logger can be used to preset common information (such as a name) and then emit :py:class:`~bark.log.Log` records with that information already present. ''' - def __init__(self, name, **kw): + def __init__(self, name, _handle=bark.handle, **kw): - '''Initialise logger with identifying *name*.''' + '''Initialise logger with identifying *name*. + + If you need to override the default handle then pass in a custom + *_handle* + + ''' kw['name'] = name super(Logger, self).__init__(**kw) + self._handle = _handle def log(self, message, **kw): '''Emit a :py:class:`~bark.log.Log` record. A copy of this logger's information is made and then merged with the passed in *kw* arguments before being emitted. ''' log = copy.deepcopy(self) log.update(**kw) log['message'] = message - # Call global handle method. - bark.handle(log) + self._handle(log)
Allow handle to be passed in to avoid embedded global reference.
## Code Before: import copy import bark from .log import Log class Logger(Log): '''Helper for emitting logs. A logger can be used to preset common information (such as a name) and then emit :py:class:`~bark.log.Log` records with that information already present. ''' def __init__(self, name, **kw): '''Initialise logger with identifying *name*.''' kw['name'] = name super(Logger, self).__init__(**kw) def log(self, message, **kw): '''Emit a :py:class:`~bark.log.Log` record. A copy of this logger's information is made and then merged with the passed in *kw* arguments before being emitted. ''' log = copy.deepcopy(self) log.update(**kw) log['message'] = message # Call global handle method. bark.handle(log) ## Instruction: Allow handle to be passed in to avoid embedded global reference. ## Code After: import copy import bark from .log import Log class Logger(Log): '''Helper for emitting logs. A logger can be used to preset common information (such as a name) and then emit :py:class:`~bark.log.Log` records with that information already present. ''' def __init__(self, name, _handle=bark.handle, **kw): '''Initialise logger with identifying *name*. If you need to override the default handle then pass in a custom *_handle* ''' kw['name'] = name super(Logger, self).__init__(**kw) self._handle = _handle def log(self, message, **kw): '''Emit a :py:class:`~bark.log.Log` record. A copy of this logger's information is made and then merged with the passed in *kw* arguments before being emitted. ''' log = copy.deepcopy(self) log.update(**kw) log['message'] = message self._handle(log)
// ... existing code ... def __init__(self, name, _handle=bark.handle, **kw): '''Initialise logger with identifying *name*. If you need to override the default handle then pass in a custom *_handle* ''' kw['name'] = name // ... modified code ... super(Logger, self).__init__(**kw) self._handle = _handle ... self._handle(log) // ... rest of the code ...
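The injectable `_handle` is mainly a testing seam. A sketch, assuming `Log` behaves like a dict (as the `update`/`__setitem__` calls above suggest):

```python
captured = []

# Route emitted records into a list instead of the global bark.handle.
logger = Logger('my.module', _handle=captured.append)
logger.log('something happened', level='info')

assert captured[0]['name'] == 'my.module'
assert captured[0]['message'] == 'something happened'
```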
c2fb467626d586bfb5ddef60fd4d1447515ad161
fpsd/evaluation.py
fpsd/evaluation.py
def get_feature_importances(model): try: return model.feature_importances_ except: pass try: # Must be 1D for feature importance plot if len(model.coef_) <= 1: return model.coef_[0] else: return model.coef_ except: pass return None
def get_feature_importances(model): try: return model.feature_importances_ except: pass try: # Must be 1D for feature importance plot if len(model.coef_) <= 1: return model.coef_[0] else: return model.coef_ except: pass return None def plot_feature_importances(feature_names, feature_importances, N=30): importances = list(zip(feature_names, list(feature_importances))) importances = pd.DataFrame(importances, columns=["Feature", "Importance"]) importances = importances.set_index("Feature") # Sort by the absolute value of the importance of the feature importances["sort"] = abs(importances["Importance"]) importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1) importances = importances[0:N] # Show the most important positive feature at the top of the graph importances = importances.sort(columns="Importance", ascending=True) with plt.style.context(('ggplot')): fig, ax = plt.subplots(figsize=(16,12)) ax.tick_params(labelsize=16) importances.plot(kind="barh", legend=False, ax=ax) ax.set_frame_on(False) ax.set_xlabel("Relative importance", fontsize=20) ax.set_ylabel("Feature name", fontsize=20) plt.tight_layout() plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99]) return fig
Add function for plotting feature importances
Add function for plotting feature importances
Python
agpl-3.0
freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop
def get_feature_importances(model): try: return model.feature_importances_ except: pass try: # Must be 1D for feature importance plot if len(model.coef_) <= 1: return model.coef_[0] else: return model.coef_ except: pass return None + + def plot_feature_importances(feature_names, feature_importances, N=30): + importances = list(zip(feature_names, list(feature_importances))) + importances = pd.DataFrame(importances, columns=["Feature", "Importance"]) + importances = importances.set_index("Feature") + + # Sort by the absolute value of the importance of the feature + importances["sort"] = abs(importances["Importance"]) + importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1) + importances = importances[0:N] + + # Show the most important positive feature at the top of the graph + importances = importances.sort(columns="Importance", ascending=True) + + with plt.style.context(('ggplot')): + fig, ax = plt.subplots(figsize=(16,12)) + ax.tick_params(labelsize=16) + importances.plot(kind="barh", legend=False, ax=ax) + ax.set_frame_on(False) + ax.set_xlabel("Relative importance", fontsize=20) + ax.set_ylabel("Feature name", fontsize=20) + plt.tight_layout() + plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99]) + return fig
Add function for plotting feature importances
## Code Before: def get_feature_importances(model): try: return model.feature_importances_ except: pass try: # Must be 1D for feature importance plot if len(model.coef_) <= 1: return model.coef_[0] else: return model.coef_ except: pass return None ## Instruction: Add function for plotting feature importances ## Code After: def get_feature_importances(model): try: return model.feature_importances_ except: pass try: # Must be 1D for feature importance plot if len(model.coef_) <= 1: return model.coef_[0] else: return model.coef_ except: pass return None def plot_feature_importances(feature_names, feature_importances, N=30): importances = list(zip(feature_names, list(feature_importances))) importances = pd.DataFrame(importances, columns=["Feature", "Importance"]) importances = importances.set_index("Feature") # Sort by the absolute value of the importance of the feature importances["sort"] = abs(importances["Importance"]) importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1) importances = importances[0:N] # Show the most important positive feature at the top of the graph importances = importances.sort(columns="Importance", ascending=True) with plt.style.context(('ggplot')): fig, ax = plt.subplots(figsize=(16,12)) ax.tick_params(labelsize=16) importances.plot(kind="barh", legend=False, ax=ax) ax.set_frame_on(False) ax.set_xlabel("Relative importance", fontsize=20) ax.set_ylabel("Feature name", fontsize=20) plt.tight_layout() plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99]) return fig
# ... existing code ... return None def plot_feature_importances(feature_names, feature_importances, N=30): importances = list(zip(feature_names, list(feature_importances))) importances = pd.DataFrame(importances, columns=["Feature", "Importance"]) importances = importances.set_index("Feature") # Sort by the absolute value of the importance of the feature importances["sort"] = abs(importances["Importance"]) importances = importances.sort(columns="sort", ascending=False).drop("sort", axis=1) importances = importances[0:N] # Show the most important positive feature at the top of the graph importances = importances.sort(columns="Importance", ascending=True) with plt.style.context(('ggplot')): fig, ax = plt.subplots(figsize=(16,12)) ax.tick_params(labelsize=16) importances.plot(kind="barh", legend=False, ax=ax) ax.set_frame_on(False) ax.set_xlabel("Relative importance", fontsize=20) ax.set_ylabel("Feature name", fontsize=20) plt.tight_layout() plt.title("Most important features for attack", fontsize=20).set_position([.5, 0.99]) return fig # ... rest of the code ...
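`plot_feature_importances` assumes `pd` and `plt` are already bound, and its `DataFrame.sort(columns=...)` calls are the old pandas spelling (modern pandas uses `sort_values(by=...)`). A usage sketch under those assumptions, with made-up feature names:

```python
import pandas as pd              # bound as pd, as the function expects
import matplotlib.pyplot as plt  # bound as plt, as the function expects

names = ["burst_count", "mean_gap", "total_bytes"]   # illustrative features
weights = [0.61, -0.05, 0.34]

fig = plot_feature_importances(names, weights, N=3)
fig.savefig("importances.png")
```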
6869d5edd706d95c8cadbd1945b29fdd3bfecd6b
blaze/datashape/unification.py
blaze/datashape/unification.py
from numpy import promote_types from coretypes import Fixed, Range, TypeVar, Record, \ CType, Enum, top, dynamic class Incommensurable(Exception): def __init__(self, space, dim): self.space = space self.dim = dim def __str__(self): return "No way of unifying (%s) (%s)" % ( self.space, self.dim ) def unify(a, b): """ Unification of Datashapes. """ ta = type(a) tb = type(b) # -- # Unification over BlazeT has two zeros if ta == top or tb == top: return top if ta == dynamic or tb == dynamic: return top # -- if (ta,tb) == (Fixed, Fixed): if a.val == b.val: return Fixed(a.val) else: return Enum(a.val, b.val) # -- if (ta,tb) == (TypeVar, Fixed): return TypeVar('x0') if (ta,tb) == (Fixed, TypeVar): return TypeVar('x0') # -- if (ta,tb) == (Record, Record): c = a.d.items() + b.d.items() return Record(**dict(c)) # -- if (ta,tb) == (Fixed, Range): return Range(min(a.val, b.lower), max(a.val, b.upper)) if (ta,tb) == (Range, Fixed): return Range(min(a.lower, b.val), max(a.val, b.val)) if (ta,tb) == (Range, Range): return Range(min(a.lower, b.lower), max(b.upper, b.upper)) # -- #if (ta,tb) == (Union, Union): #return Union(a.parameters + b.parameters) # -- if (ta,tb) == (CType, CType): return CType.from_str(promote_types(a.name, b.name).name) raise Incommensurable(a,b)
from numpy import promote_types from blaze.datashape.coretypes import TypeVar from blaze.expr.typeinference import infer class Incommensurable(TypeError): pass def unify(sig, concrete=True): """ Unification of Datashapes. """ resolved = infer(sig) if all(not isinstance(a, TypeVar) for a in resolved): return resolved
Remove very old type unifier in favor of a robust one
Remove very old type unifier in favor of a robust one
Python
bsd-2-clause
seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core
from numpy import promote_types - from coretypes import Fixed, Range, TypeVar, Record, \ - CType, Enum, top, dynamic + from blaze.datashape.coretypes import TypeVar + from blaze.expr.typeinference import infer - class Incommensurable(Exception): + class Incommensurable(TypeError): + pass - def __init__(self, space, dim): - self.space = space - self.dim = dim + def unify(sig, concrete=True): - def __str__(self): - return "No way of unifying (%s) (%s)" % ( - self.space, self.dim - ) - - def unify(a, b): """ Unification of Datashapes. """ - ta = type(a) - tb = type(b) + resolved = infer(sig) + if all(not isinstance(a, TypeVar) for a in resolved): + return resolved - # -- - - # Unification over BlazeT has two zeros - - if ta == top or tb == top: - return top - - if ta == dynamic or tb == dynamic: - return top - - # -- - - if (ta,tb) == (Fixed, Fixed): - if a.val == b.val: - return Fixed(a.val) - else: - return Enum(a.val, b.val) - - # -- - - if (ta,tb) == (TypeVar, Fixed): - return TypeVar('x0') - - if (ta,tb) == (Fixed, TypeVar): - return TypeVar('x0') - - # -- - - if (ta,tb) == (Record, Record): - c = a.d.items() + b.d.items() - return Record(**dict(c)) - - # -- - - if (ta,tb) == (Fixed, Range): - return Range(min(a.val, b.lower), max(a.val, b.upper)) - - if (ta,tb) == (Range, Fixed): - return Range(min(a.lower, b.val), max(a.val, b.val)) - - if (ta,tb) == (Range, Range): - return Range(min(a.lower, b.lower), max(b.upper, b.upper)) - - # -- - - #if (ta,tb) == (Union, Union): - #return Union(a.parameters + b.parameters) - - # -- - - if (ta,tb) == (CType, CType): - return CType.from_str(promote_types(a.name, b.name).name) - - raise Incommensurable(a,b) -
Remove very old type unifier in favor of a robust one
## Code Before: from numpy import promote_types from coretypes import Fixed, Range, TypeVar, Record, \ CType, Enum, top, dynamic class Incommensurable(Exception): def __init__(self, space, dim): self.space = space self.dim = dim def __str__(self): return "No way of unifying (%s) (%s)" % ( self.space, self.dim ) def unify(a, b): """ Unification of Datashapes. """ ta = type(a) tb = type(b) # -- # Unification over BlazeT has two zeros if ta == top or tb == top: return top if ta == dynamic or tb == dynamic: return top # -- if (ta,tb) == (Fixed, Fixed): if a.val == b.val: return Fixed(a.val) else: return Enum(a.val, b.val) # -- if (ta,tb) == (TypeVar, Fixed): return TypeVar('x0') if (ta,tb) == (Fixed, TypeVar): return TypeVar('x0') # -- if (ta,tb) == (Record, Record): c = a.d.items() + b.d.items() return Record(**dict(c)) # -- if (ta,tb) == (Fixed, Range): return Range(min(a.val, b.lower), max(a.val, b.upper)) if (ta,tb) == (Range, Fixed): return Range(min(a.lower, b.val), max(a.val, b.val)) if (ta,tb) == (Range, Range): return Range(min(a.lower, b.lower), max(b.upper, b.upper)) # -- #if (ta,tb) == (Union, Union): #return Union(a.parameters + b.parameters) # -- if (ta,tb) == (CType, CType): return CType.from_str(promote_types(a.name, b.name).name) raise Incommensurable(a,b) ## Instruction: Remove very old type unifier in favor of a robust one ## Code After: from numpy import promote_types from blaze.datashape.coretypes import TypeVar from blaze.expr.typeinference import infer class Incommensurable(TypeError): pass def unify(sig, concrete=True): """ Unification of Datashapes. """ resolved = infer(sig) if all(not isinstance(a, TypeVar) for a in resolved): return resolved
# ... existing code ... from numpy import promote_types from blaze.datashape.coretypes import TypeVar from blaze.expr.typeinference import infer class Incommensurable(TypeError): pass def unify(sig, concrete=True): """ # ... modified code ... """ resolved = infer(sig) if all(not isinstance(a, TypeVar) for a in resolved): return resolved # ... rest of the code ...
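The removed `CType` branch leaned on NumPy's type-promotion table; its behaviour in isolation:

```python
from numpy import promote_types

print(promote_types('int32', 'float64'))  # float64
print(promote_types('int8', 'uint8'))     # int16: smallest signed type holding both
```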
3ef77edcbf4b3268399f439b89f15ef087bd06bb
chamber/utils/logging.py
chamber/utils/logging.py
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
Set default value for json.dumps
Set default value for json.dumps Use default value when type cannot be serialized.
Python
bsd-3-clause
druids/django-chamber
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } - record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) + record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, + default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
Set default value for json.dumps
## Code Before: import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True ## Instruction: Set default value for json.dumps ## Code After: import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
# ... existing code ... } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) # ... rest of the code ...
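What the new `default=` hook does, shown standalone with the same lambda; `Opaque` is an illustrative non-serializable class:

```python
import json

class Opaque:
    pass

payload = {"ok": 1, "bad": Opaque()}
print(json.dumps(
    payload,
    default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__),
))
# {"ok": 1, "bad": "<<NON-SERIALIZABLE TYPE: Opaque>>"}
```

Without the hook, `json.dumps` would raise `TypeError` and the log record would be lost.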
12e924cd617811cb763857a9abf14e8b3487f5a1
ckanext/nhm/routes/bbcm.py
ckanext/nhm/routes/bbcm.py
from flask import Blueprint from ckan.plugins import toolkit # bbcm = big butterfly count map :) # create a flask blueprint with a prefix blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__, url_prefix=u'/big-butterfly-count-map') @blueprint.route(u'') @blueprint.route(u'/') def bbcm(): ''' Render the big butterfly count map page. ''' return toolkit.render(u'bbcm.html', {})
from flask import Blueprint from ckan.plugins import toolkit # bbcm = big butterfly count map :) # create a flask blueprint blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__) @blueprint.route(u'/big-butterfly-count-map') def bbcm(): ''' Render the big butterfly count map page. ''' return toolkit.render(u'bbcm.html', {})
Allow the url to be accessed with or without a / on the end
Allow the url to be accessed with or without a / on the end
Python
mit
NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm
from flask import Blueprint from ckan.plugins import toolkit # bbcm = big butterfly count map :) - # create a flask blueprint with a prefix + # create a flask blueprint - blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__, + blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__) - url_prefix=u'/big-butterfly-count-map') + @blueprint.route(u'/big-butterfly-count-map') - @blueprint.route(u'') - @blueprint.route(u'/') def bbcm(): ''' Render the big butterfly count map page. ''' return toolkit.render(u'bbcm.html', {})
Allow the url to be accessed with or without a / on the end
## Code Before: from flask import Blueprint from ckan.plugins import toolkit # bbcm = big butterfly count map :) # create a flask blueprint with a prefix blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__, url_prefix=u'/big-butterfly-count-map') @blueprint.route(u'') @blueprint.route(u'/') def bbcm(): ''' Render the big butterfly count map page. ''' return toolkit.render(u'bbcm.html', {}) ## Instruction: Allow the url to be accessed with or without a / on the end ## Code After: from flask import Blueprint from ckan.plugins import toolkit # bbcm = big butterfly count map :) # create a flask blueprint blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__) @blueprint.route(u'/big-butterfly-count-map') def bbcm(): ''' Render the big butterfly count map page. ''' return toolkit.render(u'bbcm.html', {})
... # bbcm = big butterfly count map :) # create a flask blueprint blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__) ... @blueprint.route(u'/big-butterfly-count-map') def bbcm(): ...
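Trailing-slash handling in Flask is governed by Werkzeug's `strict_slashes` rule matching; one explicit way to make a single rule answer both forms is shown below (a generic sketch with a made-up `/ping` route, not the blueprint above):

```python
from flask import Flask

app = Flask(__name__)

# strict_slashes=False lets both /ping and /ping/ match this one rule.
@app.route('/ping', strict_slashes=False)
def ping():
    return 'pong'
```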
d1e56cfcd11bcd509d8fa3954c00e06a84bddd87
synapse/storage/engines/__init__.py
synapse/storage/engines/__init__.py
from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib import platform SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine, } def create_engine(database_config): name = database_config["name"] engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: needs_pypy_hack = (name == "psycopg2" and platform.python_implementation() == "PyPy") if needs_pypy_hack: module = importlib.import_module("psycopg2cffi") else: module = importlib.import_module(name) return engine_class(module, database_config) raise RuntimeError( "Unsupported database engine '%s'" % (name,) ) __all__ = ["create_engine", "IncorrectDatabaseSetup"]
from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib import platform SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine, } def create_engine(database_config): name = database_config["name"] engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: # pypy requires psycopg2cffi rather than psycopg2 if (name == "psycopg2" and platform.python_implementation() == "PyPy"): name = "psycopg2cffi" module = importlib.import_module(name) return engine_class(module, database_config) raise RuntimeError( "Unsupported database engine '%s'" % (name,) ) __all__ = ["create_engine", "IncorrectDatabaseSetup"]
Fix pep8 error on psycopg2cffi hack
Fix pep8 error on psycopg2cffi hack
Python
apache-2.0
matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse
from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib import platform SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine, } def create_engine(database_config): name = database_config["name"] engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: + # pypy requires psycopg2cffi rather than psycopg2 - needs_pypy_hack = (name == "psycopg2" and + if (name == "psycopg2" and - platform.python_implementation() == "PyPy") + platform.python_implementation() == "PyPy"): + name = "psycopg2cffi" - if needs_pypy_hack: - module = importlib.import_module("psycopg2cffi") - else: - module = importlib.import_module(name) + module = importlib.import_module(name) return engine_class(module, database_config) raise RuntimeError( "Unsupported database engine '%s'" % (name,) ) __all__ = ["create_engine", "IncorrectDatabaseSetup"]
Fix pep8 error on psycopg2cffi hack
## Code Before: from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib import platform SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine, } def create_engine(database_config): name = database_config["name"] engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: needs_pypy_hack = (name == "psycopg2" and platform.python_implementation() == "PyPy") if needs_pypy_hack: module = importlib.import_module("psycopg2cffi") else: module = importlib.import_module(name) return engine_class(module, database_config) raise RuntimeError( "Unsupported database engine '%s'" % (name,) ) __all__ = ["create_engine", "IncorrectDatabaseSetup"] ## Instruction: Fix pep8 error on psycopg2cffi hack ## Code After: from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib import platform SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "psycopg2": PostgresEngine, } def create_engine(database_config): name = database_config["name"] engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: # pypy requires psycopg2cffi rather than psycopg2 if (name == "psycopg2" and platform.python_implementation() == "PyPy"): name = "psycopg2cffi" module = importlib.import_module(name) return engine_class(module, database_config) raise RuntimeError( "Unsupported database engine '%s'" % (name,) ) __all__ = ["create_engine", "IncorrectDatabaseSetup"]
# ... existing code ... if engine_class: # pypy requires psycopg2cffi rather than psycopg2 if (name == "psycopg2" and platform.python_implementation() == "PyPy"): name = "psycopg2cffi" module = importlib.import_module(name) return engine_class(module, database_config) # ... rest of the code ...
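The PyPy detection in isolation; the actual import is left commented out since `psycopg2cffi` is only importable where it has been installed:

```python
import importlib
import platform

name = "psycopg2"
if name == "psycopg2" and platform.python_implementation() == "PyPy":
    name = "psycopg2cffi"   # PyPy needs the cffi port of psycopg2

print(name)                 # psycopg2 on CPython, psycopg2cffi on PyPy
# module = importlib.import_module(name)  # would load the chosen driver
```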
e422f77898853fc759d3828c4053b799cd2b1fa3
plumeria/plugins/bot_control.py
plumeria/plugins/bot_control.py
from plumeria.command import commands, CommandError from plumeria.message.lists import build_list from plumeria.perms import owners_only from plumeria.transport import transports @commands.register('accept invite', category='Discord') @owners_only async def accept_invite(message): """ Accept an invite to join a server. Example:: /accept invite https://discord.gg/00000 """ url = message.content.strip() results = [] if not len(url): raise CommandError("Supply an invite URL.") for transport in transports.transports.values(): if hasattr(transport, 'accept_invite'): try: await transport.accept_invite(url) results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}')) except Exception as e: results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e)))) else: results.append((transport.id, "\N{WARNING SIGN} No support for invite links")) if len(results): return build_list(["**{}:** {}".format(e[0], e[1]) for e in results]) else: raise CommandError("No transports available.")
from plumeria.command import commands, CommandError from plumeria.message.lists import build_list from plumeria.perms import owners_only from plumeria.transport import transports @commands.register('join', category='Discord') @owners_only async def join(message): """ Accept an invite to join a server. Example:: /join https://discord.gg/00000 """ url = message.content.strip() results = [] if not len(url): raise CommandError("Supply an invite URL.") for transport in transports.transports.values(): if hasattr(transport, 'accept_invite'): try: await transport.accept_invite(url) results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}')) except Exception as e: results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e)))) else: results.append((transport.id, "\N{WARNING SIGN} No support for invite links")) if len(results): return build_list(["**{}:** {}".format(e[0], e[1]) for e in results]) else: raise CommandError("No transports available.")
Use /join instead of /accept invite.
Use /join instead of /accept invite.
Python
mit
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
from plumeria.command import commands, CommandError from plumeria.message.lists import build_list from plumeria.perms import owners_only from plumeria.transport import transports - @commands.register('accept invite', category='Discord') + @commands.register('join', category='Discord') @owners_only - async def accept_invite(message): + async def join(message): """ Accept an invite to join a server. Example:: - /accept invite https://discord.gg/00000 + /join https://discord.gg/00000 """ url = message.content.strip() results = [] if not len(url): raise CommandError("Supply an invite URL.") for transport in transports.transports.values(): if hasattr(transport, 'accept_invite'): try: await transport.accept_invite(url) results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}')) except Exception as e: results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e)))) else: results.append((transport.id, "\N{WARNING SIGN} No support for invite links")) if len(results): return build_list(["**{}:** {}".format(e[0], e[1]) for e in results]) else: raise CommandError("No transports available.")
Use /join instead of /accept invite.
## Code Before: from plumeria.command import commands, CommandError from plumeria.message.lists import build_list from plumeria.perms import owners_only from plumeria.transport import transports @commands.register('accept invite', category='Discord') @owners_only async def accept_invite(message): """ Accept an invite to join a server. Example:: /accept invite https://discord.gg/00000 """ url = message.content.strip() results = [] if not len(url): raise CommandError("Supply an invite URL.") for transport in transports.transports.values(): if hasattr(transport, 'accept_invite'): try: await transport.accept_invite(url) results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}')) except Exception as e: results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e)))) else: results.append((transport.id, "\N{WARNING SIGN} No support for invite links")) if len(results): return build_list(["**{}:** {}".format(e[0], e[1]) for e in results]) else: raise CommandError("No transports available.") ## Instruction: Use /join instead of /accept invite. ## Code After: from plumeria.command import commands, CommandError from plumeria.message.lists import build_list from plumeria.perms import owners_only from plumeria.transport import transports @commands.register('join', category='Discord') @owners_only async def join(message): """ Accept an invite to join a server. Example:: /join https://discord.gg/00000 """ url = message.content.strip() results = [] if not len(url): raise CommandError("Supply an invite URL.") for transport in transports.transports.values(): if hasattr(transport, 'accept_invite'): try: await transport.accept_invite(url) results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}')) except Exception as e: results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e)))) else: results.append((transport.id, "\N{WARNING SIGN} No support for invite links")) if len(results): return build_list(["**{}:** {}".format(e[0], e[1]) for e in results]) else: raise CommandError("No transports available.")
# ... existing code ... @commands.register('join', category='Discord') @owners_only async def join(message): """ # ... modified code ... /join https://discord.gg/00000 """ # ... rest of the code ...
932606e41fa5289551a026ae993ececbd117ca7d
openedx/core/djangoapps/appsembler/tpa_admin/serializers.py
openedx/core/djangoapps/appsembler/tpa_admin/serializers.py
import json from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig from rest_framework import serializers class JSONSerializerField(serializers.Field): """ Serializer for JSONField -- required to make field writable""" def to_internal_value(self, data): return json.dumps(data) def to_representation(self, value): return value class SAMLConfigurationSerializer(serializers.ModelSerializer): other_config_str = JSONSerializerField() class Meta: model = SAMLConfiguration fields = ( 'id', 'site', 'enabled','entity_id', 'private_key', 'public_key', 'org_info_str', 'other_config_str' ) class SAMLProviderConfigSerializer(serializers.ModelSerializer): class Meta: model = SAMLProviderConfig fields = ( 'id', 'site', 'enabled', 'name', 'icon_class', 'icon_image', 'secondary', 'skip_registration_form', 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings' )
import json from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData from rest_framework import serializers class JSONSerializerField(serializers.Field): """ Serializer for JSONField -- required to make field writable""" def to_internal_value(self, data): return json.dumps(data) def to_representation(self, value): return value class SAMLConfigurationSerializer(serializers.ModelSerializer): other_config_str = JSONSerializerField() class Meta: model = SAMLConfiguration fields = ( 'id', 'site', 'enabled','entity_id', 'private_key', 'public_key', 'org_info_str', 'other_config_str' ) class SAMLProviderConfigSerializer(serializers.ModelSerializer): metadata_ready = serializers.SerializerMethodField() class Meta: model = SAMLProviderConfig fields = ( 'id', 'site', 'enabled', 'name', 'icon_class', 'icon_image', 'secondary', 'skip_registration_form', 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings', 'metadata_ready' ) def get_metadata_ready(self, obj): """ Do we have cached metadata for this SAML provider? """ if not obj.is_active: return None # N/A data = SAMLProviderData.current(obj.entity_id) return bool(data and data.is_valid())
Add metadata ready field to IdP serializer
Add metadata ready field to IdP serializer
Python
agpl-3.0
appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform
import json - from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig + from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData from rest_framework import serializers class JSONSerializerField(serializers.Field): """ Serializer for JSONField -- required to make field writable""" def to_internal_value(self, data): return json.dumps(data) def to_representation(self, value): return value class SAMLConfigurationSerializer(serializers.ModelSerializer): other_config_str = JSONSerializerField() class Meta: model = SAMLConfiguration fields = ( 'id', 'site', 'enabled','entity_id', 'private_key', 'public_key', 'org_info_str', 'other_config_str' ) class SAMLProviderConfigSerializer(serializers.ModelSerializer): + metadata_ready = serializers.SerializerMethodField() class Meta: model = SAMLProviderConfig fields = ( 'id', 'site', 'enabled', 'name', 'icon_class', 'icon_image', 'secondary', 'skip_registration_form', 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', - 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings' + 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings', + 'metadata_ready' ) + def get_metadata_ready(self, obj): + """ Do we have cached metadata for this SAML provider? """ + if not obj.is_active: + return None # N/A + data = SAMLProviderData.current(obj.entity_id) + return bool(data and data.is_valid()) +
Add metadata ready field to IdP serializer
## Code Before: import json from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig from rest_framework import serializers class JSONSerializerField(serializers.Field): """ Serializer for JSONField -- required to make field writable""" def to_internal_value(self, data): return json.dumps(data) def to_representation(self, value): return value class SAMLConfigurationSerializer(serializers.ModelSerializer): other_config_str = JSONSerializerField() class Meta: model = SAMLConfiguration fields = ( 'id', 'site', 'enabled','entity_id', 'private_key', 'public_key', 'org_info_str', 'other_config_str' ) class SAMLProviderConfigSerializer(serializers.ModelSerializer): class Meta: model = SAMLProviderConfig fields = ( 'id', 'site', 'enabled', 'name', 'icon_class', 'icon_image', 'secondary', 'skip_registration_form', 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings' ) ## Instruction: Add metadata ready field to IdP serializer ## Code After: import json from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData from rest_framework import serializers class JSONSerializerField(serializers.Field): """ Serializer for JSONField -- required to make field writable""" def to_internal_value(self, data): return json.dumps(data) def to_representation(self, value): return value class SAMLConfigurationSerializer(serializers.ModelSerializer): other_config_str = JSONSerializerField() class Meta: model = SAMLConfiguration fields = ( 'id', 'site', 'enabled','entity_id', 'private_key', 'public_key', 'org_info_str', 'other_config_str' ) class SAMLProviderConfigSerializer(serializers.ModelSerializer): metadata_ready = serializers.SerializerMethodField() class Meta: model = SAMLProviderConfig fields = ( 'id', 'site', 'enabled', 'name', 'icon_class', 'icon_image', 'secondary', 'skip_registration_form', 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings', 'metadata_ready' ) def get_metadata_ready(self, obj): """ Do we have cached metadata for this SAML provider? """ if not obj.is_active: return None # N/A data = SAMLProviderData.current(obj.entity_id) return bool(data and data.is_valid())
... from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData ... class SAMLProviderConfigSerializer(serializers.ModelSerializer): metadata_ready = serializers.SerializerMethodField() ... 'visible', 'skip_email_verification', 'idp_slug', 'entity_id', 'metadata_source', 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'other_settings', 'metadata_ready' ) def get_metadata_ready(self, obj): """ Do we have cached metadata for this SAML provider? """ if not obj.is_active: return None # N/A data = SAMLProviderData.current(obj.entity_id) return bool(data and data.is_valid()) ...
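`SerializerMethodField` resolves by convention: DRF calls `get_<field_name>(self, obj)` at serialization time, which is how `metadata_ready` is populated above. A minimal stand-alone illustration (a made-up `ThingSerializer`, assuming a configured Django/DRF environment):

```python
from rest_framework import serializers

class ThingSerializer(serializers.Serializer):
    name = serializers.CharField()
    shout = serializers.SerializerMethodField()  # read-only, computed field

    def get_shout(self, obj):
        return obj['name'].upper()

print(dict(ThingSerializer({'name': 'saml'}).data))
# {'name': 'saml', 'shout': 'SAML'}
```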
cdc6390ec88a14b339cb336fcc0d77e747aae99a
sieve/sieve.py
sieve/sieve.py
def sieve(n): if n < 2: return [] not_prime = set() prime = [2] for i in range(3, n+1, 2): if i not in not_prime: prime.append(i) not_prime.update(range(i*i, n+1, i)) return prime
def sieve(n): return list(primes(n)) def primes(n): if n < 2: raise StopIteration yield 2 not_prime = set() for i in range(3, n+1, 2): if i not in not_prime: not_prime.update(range(i*i, n+1, i)) yield i
Revert back to a generator - it's actually slightly faster
Revert back to a generator - it's actually slightly faster
Python
agpl-3.0
CubicComet/exercism-python-solutions
def sieve(n): + return list(primes(n)) + + + def primes(n): if n < 2: - return [] + raise StopIteration + yield 2 not_prime = set() - prime = [2] for i in range(3, n+1, 2): if i not in not_prime: - prime.append(i) not_prime.update(range(i*i, n+1, i)) - return prime + yield i
Revert back to a generator - it's actually slightly faster
## Code Before: def sieve(n): if n < 2: return [] not_prime = set() prime = [2] for i in range(3, n+1, 2): if i not in not_prime: prime.append(i) not_prime.update(range(i*i, n+1, i)) return prime ## Instruction: Revert back to a generator - it's actually slightly faster ## Code After: def sieve(n): return list(primes(n)) def primes(n): if n < 2: raise StopIteration yield 2 not_prime = set() for i in range(3, n+1, 2): if i not in not_prime: not_prime.update(range(i*i, n+1, i)) yield i
// ... existing code ... def sieve(n): return list(primes(n)) def primes(n): if n < 2: raise StopIteration yield 2 not_prime = set() for i in range(3, n+1, 2): // ... modified code ... if i not in not_prime: not_prime.update(range(i*i, n+1, i)) yield i // ... rest of the code ...
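Usage of the reverted generator form. One caveat worth flagging: `raise StopIteration` inside a generator was later outlawed by PEP 479, so on Python 3.7+ the `n < 2` branch would need a bare `return` instead; the calls below never hit that branch:

```python
print(sieve(30))      # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]

for p in primes(10):  # the generator can also be consumed lazily
    print(p)          # 2, 3, 5, 7
```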
e69559b81e9b52eb0834df67b0197aa0f734db3c
wafer/talks/admin.py
wafer/talks/admin.py
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') admin.site.register(Talk, TalkAdmin)
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
Make talk status editable from the talk list overview
Make talk status editable from the talk list overview
Python
isc
CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') + list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
Make talk status editable from the talk list overview
## Code Before: from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') admin.site.register(Talk, TalkAdmin) ## Instruction: Make talk status editable from the talk list overview ## Code After: from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
# ... existing code ...
    list_display = ('corresponding_author', 'title', 'status')
    list_editable = ('status',)
# ... rest of the code ...
0ba9fa847a8b605363b298ecad40cb2fc5870cbb
build_modules.py
build_modules.py
import os, sys, subprocess, shutil


def check_for_module_builder():
    if os.path.exists("voxel_native/scripts/"):
        return
    print("Downloading P3DModuleBuilder...")
    cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
    try:
        output = subprocess.check_output(cmd, stderr=sys.stderr)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Couldn't download P3DModuleBuilder.")
        sys.exit(-1)


def build_modules():
    print("Building native modules...")
    check_for_module_builder()

    cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
    try:
        output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Error building the native modules.")
        sys.exit(-1)
    shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")

if __name__ == "__main__":
    build_modules()
import os, sys, subprocess, shutil


def check_for_module_builder():
    if os.path.exists("voxel_native/scripts/"):
        return
    print("Downloading P3DModuleBuilder...")
    cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
    try:
        output = subprocess.check_output(cmd, stderr=sys.stderr)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Couldn't download P3DModuleBuilder.")
        sys.exit(-1)


def build_modules():
    print("Building native modules...")
    check_for_module_builder()

    cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
    try:
        output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Error building the native modules.")
        sys.exit(-1)
    from voxel_native.scripts.common import is_macos, is_windows, is_linux
    if is_windows():
        shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
    elif is_macos() or is_linux():
        shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")

if __name__ == "__main__":
    build_modules()
Update build script to work correctly on macOS and linux.
Update build script to work correctly on macOS and linux.
Python
mit
treamology/panda3d-voxels,treamology/panda3d-voxels,treamology/panda3d-voxels
  import os, sys, subprocess, shutil


  def check_for_module_builder():
      if os.path.exists("voxel_native/scripts/"):
          return
      print("Downloading P3DModuleBuilder...")
      cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
      try:
          output = subprocess.check_output(cmd, stderr=sys.stderr)
      except subprocess.CalledProcessError as errorMsg:
          print(errorMsg)
          print("Couldn't download P3DModuleBuilder.")
          sys.exit(-1)


  def build_modules():
      print("Building native modules...")
      check_for_module_builder()

      cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
      try:
          output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
      except subprocess.CalledProcessError as errorMsg:
          print(errorMsg)
          print("Error building the native modules.")
          sys.exit(-1)
+     from voxel_native.scripts.common import is_macos, is_windows, is_linux
+     if is_windows():
-     shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
+         shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
-
+     elif is_macos() or is_linux():
+         shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")

  if __name__ == "__main__":
      build_modules()
Update build script to work correctly on macOS and linux.
## Code Before:
import os, sys, subprocess, shutil


def check_for_module_builder():
    if os.path.exists("voxel_native/scripts/"):
        return
    print("Downloading P3DModuleBuilder...")
    cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
    try:
        output = subprocess.check_output(cmd, stderr=sys.stderr)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Couldn't download P3DModuleBuilder.")
        sys.exit(-1)


def build_modules():
    print("Building native modules...")
    check_for_module_builder()

    cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
    try:
        output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Error building the native modules.")
        sys.exit(-1)
    shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")

if __name__ == "__main__":
    build_modules()

## Instruction:
Update build script to work correctly on macOS and linux.

## Code After:
import os, sys, subprocess, shutil


def check_for_module_builder():
    if os.path.exists("voxel_native/scripts/"):
        return
    print("Downloading P3DModuleBuilder...")
    cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
    try:
        output = subprocess.check_output(cmd, stderr=sys.stderr)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Couldn't download P3DModuleBuilder.")
        sys.exit(-1)


def build_modules():
    print("Building native modules...")
    check_for_module_builder()

    cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
    try:
        output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
    except subprocess.CalledProcessError as errorMsg:
        print(errorMsg)
        print("Error building the native modules.")
        sys.exit(-1)
    from voxel_native.scripts.common import is_macos, is_windows, is_linux
    if is_windows():
        shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
    elif is_macos() or is_linux():
        shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")

if __name__ == "__main__":
    build_modules()
...
    from voxel_native.scripts.common import is_macos, is_windows, is_linux
    if is_windows():
        shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
    elif is_macos() or is_linux():
        shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
...
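The `is_windows`/`is_macos`/`is_linux` helpers come from the downloaded P3DModuleBuilder scripts and are not shown in this diff. A minimal sketch of what such helpers typically look like, assuming they key off `sys.platform` (an assumption, not the actual P3DModuleBuilder source):

import sys

def is_windows():
    # CPython reports 'win32' on both 32- and 64-bit Windows
    return sys.platform == 'win32'

def is_macos():
    return sys.platform == 'darwin'

def is_linux():
    return sys.platform.startswith('linux')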
57b1dbc45e7b78f7aa272fd5b7d4bd022850beb9
lametro/migrations/0007_update_packet_links.py
lametro/migrations/0007_update_packet_links.py
from django.db import migrations


def resave_packets(apps, schema_editor):
    '''
    Re-save all existing packets to update their URLs based on the new value
    of MERGE_HOST.
    '''
    for packet in ('BillPacket', 'EventPacket'):
        packet_model = apps.get_model('lametro', packet)
        for p in packet_model.objects.all():
            p.save(merge=False)


class Migration(migrations.Migration):

    dependencies = [
        ('lametro', '0006_add_plan_program_policy'),
    ]

    operations = [
        migrations.RunPython(resave_packets),
    ]
from django.db import migrations


def resave_packets(apps, schema_editor):
    '''
    Re-save all existing packets to update their URLs based on the new value
    of MERGE_HOST.
    '''
    # for packet in ('BillPacket', 'EventPacket'):
    #     packet_model = apps.get_model('lametro', packet)
    #     for p in packet_model.objects.all():
    #         p.save(merge=False)
    return


class Migration(migrations.Migration):

    dependencies = [
        ('lametro', '0006_add_plan_program_policy'),
    ]

    operations = [
        migrations.RunPython(resave_packets),
    ]
Disable data migration for deployment
Disable data migration for deployment
Python
mit
datamade/la-metro-councilmatic,datamade/la-metro-councilmatic,datamade/la-metro-councilmatic,datamade/la-metro-councilmatic
  from django.db import migrations


  def resave_packets(apps, schema_editor):
      '''
      Re-save all existing packets to update their URLs based on the new value
      of MERGE_HOST.
      '''
-     for packet in ('BillPacket', 'EventPacket'):
+     # for packet in ('BillPacket', 'EventPacket'):
-         packet_model = apps.get_model('lametro', packet)
+     #     packet_model = apps.get_model('lametro', packet)
-         for p in packet_model.objects.all():
+     #     for p in packet_model.objects.all():
-             p.save(merge=False)
+     #         p.save(merge=False)
+     return


  class Migration(migrations.Migration):

      dependencies = [
          ('lametro', '0006_add_plan_program_policy'),
      ]

      operations = [
          migrations.RunPython(resave_packets),
      ]
Disable data migration for deployment
## Code Before:
from django.db import migrations


def resave_packets(apps, schema_editor):
    '''
    Re-save all existing packets to update their URLs based on the new value
    of MERGE_HOST.
    '''
    for packet in ('BillPacket', 'EventPacket'):
        packet_model = apps.get_model('lametro', packet)
        for p in packet_model.objects.all():
            p.save(merge=False)


class Migration(migrations.Migration):

    dependencies = [
        ('lametro', '0006_add_plan_program_policy'),
    ]

    operations = [
        migrations.RunPython(resave_packets),
    ]

## Instruction:
Disable data migration for deployment

## Code After:
from django.db import migrations


def resave_packets(apps, schema_editor):
    '''
    Re-save all existing packets to update their URLs based on the new value
    of MERGE_HOST.
    '''
    # for packet in ('BillPacket', 'EventPacket'):
    #     packet_model = apps.get_model('lametro', packet)
    #     for p in packet_model.objects.all():
    #         p.save(merge=False)
    return


class Migration(migrations.Migration):

    dependencies = [
        ('lametro', '0006_add_plan_program_policy'),
    ]

    operations = [
        migrations.RunPython(resave_packets),
    ]
...
    '''
    # for packet in ('BillPacket', 'EventPacket'):
    #     packet_model = apps.get_model('lametro', packet)
    #     for p in packet_model.objects.all():
    #         p.save(merge=False)
    return
...
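Editorial aside: if the data migration is meant to stay disabled, Django ships a built-in no-op callable that expresses the same intent without a commented-out body. A sketch of that alternative (a suggestion, not what the commit does):

class Migration(migrations.Migration):

    dependencies = [
        ('lametro', '0006_add_plan_program_policy'),
    ]

    operations = [
        # RunPython.noop keeps the migration graph intact while doing nothing
        migrations.RunPython(migrations.RunPython.noop),
    ]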
2c965c0a75be129f429e40ade34ef608f8ceea27
micropress/urls.py
micropress/urls.py
from django.conf.urls.defaults import *


urlpatterns = patterns('micropress.views',
    (r'^$', 'article_list'),
    (r'^issue/(?P<issue>\d+)/$', 'issue_list'),
    (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
    #(r'^new/$', 'article_create'),
)
from django.conf.urls.defaults import *


urlpatterns = patterns('micropress.views',
    (r'^$', 'article_list'),
    url(r'^issue/(?P<issue>\d+)/$', 'article_list', name='issue_list'),
    (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
    #(r'^new/$', 'article_create'),
)
Fix url spec for article list by issue.
Fix url spec for article list by issue.
Python
mit
jbradberry/django-micro-press,jbradberry/django-micro-press
  from django.conf.urls.defaults import *


  urlpatterns = patterns('micropress.views',
      (r'^$', 'article_list'),
-     (r'^issue/(?P<issue>\d+)/$', 'issue_list'),
+     url(r'^issue/(?P<issue>\d+)/$', 'article_list', name='issue_list'),
      (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
      #(r'^new/$', 'article_create'),
  )
Fix url spec for article list by issue.
## Code Before:
from django.conf.urls.defaults import *


urlpatterns = patterns('micropress.views',
    (r'^$', 'article_list'),
    (r'^issue/(?P<issue>\d+)/$', 'issue_list'),
    (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
    #(r'^new/$', 'article_create'),
)

## Instruction:
Fix url spec for article list by issue.

## Code After:
from django.conf.urls.defaults import *


urlpatterns = patterns('micropress.views',
    (r'^$', 'article_list'),
    url(r'^issue/(?P<issue>\d+)/$', 'article_list', name='issue_list'),
    (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
    #(r'^new/$', 'article_create'),
)
// ... existing code ...
    (r'^$', 'article_list'),
    url(r'^issue/(?P<issue>\d+)/$', 'article_list', name='issue_list'),
    (r'^article/(?P<slug>[-\w]+)/$', 'article_detail'),
// ... rest of the code ...
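The point of wrapping the pattern in `url(..., name='issue_list')` is that the route can now be reversed by name instead of being hard-coded. A hedged usage sketch (assuming the old `django.core.urlresolvers` import that matches this `defaults`-era Django, and that the app's URLconf is mounted at the site root):

from django.core.urlresolvers import reverse

# builds '/issue/3/' from the named pattern, so templates and views
# never need to spell out the URL structure themselves
issue_url = reverse('issue_list', kwargs={'issue': 3})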
1c3ff4552b82183263ead0aefe47b867a7b2022e
10_anaconda/jupyter_notebook_config.py
10_anaconda/jupyter_notebook_config.py
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import errno
import stat

c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False

# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
    dir_name = jupyter_data_dir()
    pem_file = os.path.join(dir_name, 'notebook.pem')
    try:
        os.makedirs(dir_name)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
            pass
        else:
            raise
    # Generate a certificate if one doesn't exist on disk
    subprocess.check_call(['openssl', 'req', '-new',
                           '-newkey', 'rsa:2048',
                           '-days', '365',
                           '-nodes', '-x509',
                           '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
                           '-keyout', pem_file,
                           '-out', pem_file])
    # Restrict access to the file
    os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import os.path
import errno
import stat

c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False

# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
    dir_name = jupyter_data_dir()
    pem_file = os.path.join(dir_name, 'notebook.pem')
    if not os.path.isfile(pem_file):
        try:
            os.makedirs(dir_name)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
                pass
            else:
                raise
        # Generate a certificate if one doesn't exist on disk
        subprocess.check_call(['openssl', 'req', '-new',
                               '-newkey', 'rsa:2048',
                               '-days', '365',
                               '-nodes', '-x509',
                               '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
                               '-keyout', pem_file,
                               '-out', pem_file])
        # Restrict access to the file
        os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file
Fix certificate regenerating each startup
Fix certificate regenerating each startup
Python
apache-2.0
LamDang/docker-datascience,LamDang/docker-datascience
  from jupyter_core.paths import jupyter_data_dir
  import subprocess
  import os
+ import os.path
  import errno
  import stat

  c = get_config()
  c.NotebookApp.ip = '*'
  c.NotebookApp.port = 8888
  c.NotebookApp.open_browser = False

  # Generate a self-signed certificate
  if 'GEN_CERT' in os.environ:
      dir_name = jupyter_data_dir()
      pem_file = os.path.join(dir_name, 'notebook.pem')
+     if not os.path.isfile(pem_file):
-     try:
+         try:
-         os.makedirs(dir_name)
+             os.makedirs(dir_name)
-     except OSError as exc:  # Python >2.5
+         except OSError as exc:  # Python >2.5
-         if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
+             if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
-             pass
+                 pass
-         else:
+             else:
-             raise
+                 raise
-     # Generate a certificate if one doesn't exist on disk
+         # Generate a certificate if one doesn't exist on disk
-     subprocess.check_call(['openssl', 'req', '-new',
+         subprocess.check_call(['openssl', 'req', '-new',
-                            '-newkey', 'rsa:2048',
+                                '-newkey', 'rsa:2048',
-                            '-days', '365',
+                                '-days', '365',
-                            '-nodes', '-x509',
+                                '-nodes', '-x509',
-                            '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
+                                '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
-                            '-keyout', pem_file,
+                                '-keyout', pem_file,
-                            '-out', pem_file])
+                                '-out', pem_file])
-     # Restrict access to the file
+         # Restrict access to the file
-     os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
+         os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
      c.NotebookApp.certfile = pem_file
Fix certificate regenerating each startup
## Code Before:
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import errno
import stat

c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False

# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
    dir_name = jupyter_data_dir()
    pem_file = os.path.join(dir_name, 'notebook.pem')
    try:
        os.makedirs(dir_name)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
            pass
        else:
            raise
    # Generate a certificate if one doesn't exist on disk
    subprocess.check_call(['openssl', 'req', '-new',
                           '-newkey', 'rsa:2048',
                           '-days', '365',
                           '-nodes', '-x509',
                           '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
                           '-keyout', pem_file,
                           '-out', pem_file])
    # Restrict access to the file
    os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file

## Instruction:
Fix certificate regenerating each startup

## Code After:
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import os.path
import errno
import stat

c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False

# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
    dir_name = jupyter_data_dir()
    pem_file = os.path.join(dir_name, 'notebook.pem')
    if not os.path.isfile(pem_file):
        try:
            os.makedirs(dir_name)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
                pass
            else:
                raise
        # Generate a certificate if one doesn't exist on disk
        subprocess.check_call(['openssl', 'req', '-new',
                               '-newkey', 'rsa:2048',
                               '-days', '365',
                               '-nodes', '-x509',
                               '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
                               '-keyout', pem_file,
                               '-out', pem_file])
        # Restrict access to the file
        os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file
...
import os
import os.path
import errno
...
    pem_file = os.path.join(dir_name, 'notebook.pem')
    if not os.path.isfile(pem_file):
        try:
            os.makedirs(dir_name)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
                pass
            else:
                raise
        # Generate a certificate if one doesn't exist on disk
        subprocess.check_call(['openssl', 'req', '-new',
                               '-newkey', 'rsa:2048',
                               '-days', '365',
                               '-nodes', '-x509',
                               '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
                               '-keyout', pem_file,
                               '-out', pem_file])
        # Restrict access to the file
        os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
    c.NotebookApp.certfile = pem_file
...
e824d8ad284603b73df48f1a413b129280318937
examples/annotation.py
examples/annotation.py
class Root(object):
    def __init__(self, context):
        self._ctx = context

    def mul(self, a: int = None, b: int = None) -> 'json':
        if not a and not b:
            return dict(message="Pass arguments a and b to multiply them together!")

        return dict(answer=a * b)


if __name__ == '__main__':
    from marrow.server.http import HTTPServer

    from web.core.application import Application
    from web.ext.template import TemplateExtension
    from web.ext.cast import CastExtension

    HTTPServer('127.0.0.1', 8080, application=Application(Root, dict(extensions=dict(
            template = TemplateExtension(),
            typecast = CastExtension()
        )))).start()
class Root(object):
    def __init__(self, context):
        self._ctx = context

    def mul(self, a: int = None, b: int = None) -> 'json':
        """Multiply two values together and return the result via JSON.

        Python 3 function annotations are used to ensure that the arguments are integers. This requires the
        functionality of web.ext.cast:CastExtension.

        The return value annotation is handled by web.ext.template:TemplateExtension and may be the name of
        a serialization engine or template path. (The trailing colon may be omitted for serialization when used
        this way.)

        There are two ways to execute this method:

        * POST http://localhost:8080/mul
        * GET http://localhost:8080/mul?a=27&b=42
        * GET http://localhost:8080/mul/27/42

        The latter relies on the fact we can't descend past a callable method so the remaining path elements are
        used as positional arguments, whereas the others rely on keyword argument assignment from a form-encoded
        request body or query string arguments. (Security note: any form in request body takes precedence over
        query string arguments!)
        """

        if not a and not b:
            return dict(message="Pass arguments a and b to multiply them together!")

        return dict(answer=a * b)


if __name__ == '__main__':
    from marrow.server.http import HTTPServer

    from web.core.application import Application
    from web.ext.template import TemplateExtension
    from web.ext.cast import CastExtension

    # Configure the extensions needed for this example:
    config = dict(
            extensions = dict(
                template = TemplateExtension(),
                typecast = CastExtension()
            ))

    # Create the underlying WSGI application, passing the extensions to it.
    app = Application(Root, config)

    # Start the development HTTP server.
    HTTPServer('127.0.0.1', 8080, application=app).start()
Split the HTTPServer line into multiple.
Split the HTTPServer line into multiple. Also added comments and a docstring, since this is an example after all.
Python
mit
marrow/WebCore,marrow/WebCore
  class Root(object):
      def __init__(self, context):
          self._ctx = context

      def mul(self, a: int = None, b: int = None) -> 'json':
+         """Multiply two values together and return the result via JSON.
+
+         Python 3 function annotations are used to ensure that the arguments are integers. This requires the
+         functionality of web.ext.cast:CastExtension.
+
+         The return value annotation is handled by web.ext.template:TemplateExtension and may be the name of
+         a serialization engine or template path. (The trailing colon may be omitted for serialization when used
+         this way.)
+
+         There are two ways to execute this method:
+
+         * POST http://localhost:8080/mul
+         * GET http://localhost:8080/mul?a=27&b=42
+         * GET http://localhost:8080/mul/27/42
+
+         The latter relies on the fact we can't descend past a callable method so the remaining path elements are
+         used as positional arguments, whereas the others rely on keyword argument assignment from a form-encoded
+         request body or query string arguments. (Security note: any form in request body takes precedence over
+         query string arguments!)
+         """
+
          if not a and not b:
              return dict(message="Pass arguments a and b to multiply them together!")

          return dict(answer=a * b)


  if __name__ == '__main__':
      from marrow.server.http import HTTPServer

      from web.core.application import Application
      from web.ext.template import TemplateExtension
      from web.ext.cast import CastExtension
+
+     # Configure the extensions needed for this example:
+     config = dict(
+             extensions = dict(
+                 template = TemplateExtension(),
+                 typecast = CastExtension()
+             ))

+     # Create the underlying WSGI application, passing the extensions to it.
+     app = Application(Root, config)
-     HTTPServer('127.0.0.1', 8080, application=Application(Root, dict(extensions=dict(
-             template = TemplateExtension(),
-             typecast = CastExtension()
-         )))).start()
+     # Start the development HTTP server.
+     HTTPServer('127.0.0.1', 8080, application=app).start()
Split the HTTPServer line into multiple.
## Code Before:
class Root(object):
    def __init__(self, context):
        self._ctx = context

    def mul(self, a: int = None, b: int = None) -> 'json':
        if not a and not b:
            return dict(message="Pass arguments a and b to multiply them together!")

        return dict(answer=a * b)


if __name__ == '__main__':
    from marrow.server.http import HTTPServer

    from web.core.application import Application
    from web.ext.template import TemplateExtension
    from web.ext.cast import CastExtension

    HTTPServer('127.0.0.1', 8080, application=Application(Root, dict(extensions=dict(
            template = TemplateExtension(),
            typecast = CastExtension()
        )))).start()

## Instruction:
Split the HTTPServer line into multiple.

## Code After:
class Root(object):
    def __init__(self, context):
        self._ctx = context

    def mul(self, a: int = None, b: int = None) -> 'json':
        """Multiply two values together and return the result via JSON.

        Python 3 function annotations are used to ensure that the arguments are integers. This requires the
        functionality of web.ext.cast:CastExtension.

        The return value annotation is handled by web.ext.template:TemplateExtension and may be the name of
        a serialization engine or template path. (The trailing colon may be omitted for serialization when used
        this way.)

        There are two ways to execute this method:

        * POST http://localhost:8080/mul
        * GET http://localhost:8080/mul?a=27&b=42
        * GET http://localhost:8080/mul/27/42

        The latter relies on the fact we can't descend past a callable method so the remaining path elements are
        used as positional arguments, whereas the others rely on keyword argument assignment from a form-encoded
        request body or query string arguments. (Security note: any form in request body takes precedence over
        query string arguments!)
        """

        if not a and not b:
            return dict(message="Pass arguments a and b to multiply them together!")

        return dict(answer=a * b)


if __name__ == '__main__':
    from marrow.server.http import HTTPServer

    from web.core.application import Application
    from web.ext.template import TemplateExtension
    from web.ext.cast import CastExtension

    # Configure the extensions needed for this example:
    config = dict(
            extensions = dict(
                template = TemplateExtension(),
                typecast = CastExtension()
            ))

    # Create the underlying WSGI application, passing the extensions to it.
    app = Application(Root, config)

    # Start the development HTTP server.
    HTTPServer('127.0.0.1', 8080, application=app).start()
# ... existing code ...
    def mul(self, a: int = None, b: int = None) -> 'json':
        """Multiply two values together and return the result via JSON.

        Python 3 function annotations are used to ensure that the arguments are integers. This requires the
        functionality of web.ext.cast:CastExtension.

        The return value annotation is handled by web.ext.template:TemplateExtension and may be the name of
        a serialization engine or template path. (The trailing colon may be omitted for serialization when used
        this way.)

        There are two ways to execute this method:

        * POST http://localhost:8080/mul
        * GET http://localhost:8080/mul?a=27&b=42
        * GET http://localhost:8080/mul/27/42

        The latter relies on the fact we can't descend past a callable method so the remaining path elements are
        used as positional arguments, whereas the others rely on keyword argument assignment from a form-encoded
        request body or query string arguments. (Security note: any form in request body takes precedence over
        query string arguments!)
        """

        if not a and not b:
# ... modified code ...
    from web.ext.cast import CastExtension

    # Configure the extensions needed for this example:
    config = dict(
            extensions = dict(
                template = TemplateExtension(),
                typecast = CastExtension()
            ))

    # Create the underlying WSGI application, passing the extensions to it.
    app = Application(Root, config)

    # Start the development HTTP server.
    HTTPServer('127.0.0.1', 8080, application=app).start()
# ... rest of the code ...
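A quick client-side sketch of exercising the example above (assumes the development server from the `__main__` block is running locally; `requests` is used here purely for illustration):

import requests

# positional-style invocation: the path elements become mul()'s a and b
print(requests.get('http://localhost:8080/mul/27/42').json())
# -> {'answer': 1134}, serialized per the 'json' return annotation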
52e15ab96718a491f805eee6f7130d4f02530940
alfred/helpers.py
alfred/helpers.py
from alfred_db.models import User from flask import current_app from github import Github from requests_oauth2 import OAuth2 from .database import db def get_shell(): try: from IPython.frontend.terminal.embed import InteractiveShellEmbed except ImportError: import code return lambda **context: code.interact('', local=context) else: ipython = InteractiveShellEmbed(banner1='') return lambda **context: ipython(global_ns={}, local_ns=context) def get_oauth2_handler(): GITHUB = current_app.config['GITHUB'] return OAuth2( GITHUB['client_id'], GITHUB['client_secret'], GITHUB['auth_url'], '', GITHUB['authorize_url'], GITHUB['token_url'] ) def get_user_by_token(access_token): api = Github(access_token) github_user = api.get_user() user = db.session.query(User).filter_by(github_id=github_user.id).first() if user is None: user = User( github_access_token=access_token, github_id=github_user.id, name=github_user.name, email=github_user.email, login=github_user.login, ) else: user.github_access_token = access_token user.login = github_user.login db.session.add(user) db.session.commit() return user
from alfred_db.models import User from flask import current_app from github import Github from requests_oauth2 import OAuth2 from .database import db def get_shell(): try: from IPython.frontend.terminal.embed import InteractiveShellEmbed except ImportError: import code return lambda **context: code.interact('', local=context) else: ipython = InteractiveShellEmbed(banner1='') return lambda **context: ipython(global_ns={}, local_ns=context) def get_oauth2_handler(): GITHUB = current_app.config['GITHUB'] return OAuth2( GITHUB['client_id'], GITHUB['client_secret'], GITHUB['auth_url'], '', GITHUB['authorize_url'], GITHUB['token_url'] ) def get_user_by_token(access_token): api = Github(access_token) github_user = api.get_user() user = db.session.query(User).filter_by(github_id=github_user.id).first() if user is None: user = User( github_access_token=access_token, github_id=github_user.id, name=github_user.name, email=github_user.email, login=github_user.login, ) db.session.add(user) else: user.github_access_token = access_token user.login = github_user.login db.session.commit() return user
Add user to db.session only when it has been created
Add user to db.session only when it has been created
Python
isc
alfredhq/alfred,alfredhq/alfred
from alfred_db.models import User from flask import current_app from github import Github from requests_oauth2 import OAuth2 from .database import db def get_shell(): try: from IPython.frontend.terminal.embed import InteractiveShellEmbed except ImportError: import code return lambda **context: code.interact('', local=context) else: ipython = InteractiveShellEmbed(banner1='') return lambda **context: ipython(global_ns={}, local_ns=context) def get_oauth2_handler(): GITHUB = current_app.config['GITHUB'] return OAuth2( GITHUB['client_id'], GITHUB['client_secret'], GITHUB['auth_url'], '', GITHUB['authorize_url'], GITHUB['token_url'] ) def get_user_by_token(access_token): api = Github(access_token) github_user = api.get_user() user = db.session.query(User).filter_by(github_id=github_user.id).first() if user is None: user = User( github_access_token=access_token, github_id=github_user.id, name=github_user.name, email=github_user.email, login=github_user.login, ) + db.session.add(user) else: user.github_access_token = access_token user.login = github_user.login - db.session.add(user) db.session.commit() return user
Add user to db.session only when it has been created
## Code Before:
import os
import json

from flask import Flask, render_template


def channels_json(station, escaped=False):
    channels = [{"name": channel} for channel in station.channels()]
    jsonbody = json.dumps(channels)
    if escaped:
        jsonbody = jsonbody.replace("</", "<\\/")
    return jsonbody


def make_airship(station):
    app = Flask(__name__)

    @app.route("/")
    def index():
        return render_template("index.html", channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    return app

## Instruction:
Create a route for fetching grefs

## Code After:
import os
import json

from flask import Flask, render_template


def channels_json(station, escaped=False):
    channels = [{"name": channel} for channel in station.channels()]
    jsonbody = json.dumps(channels)
    if escaped:
        jsonbody = jsonbody.replace("</", "<\\/")
    return jsonbody


def make_airship(station):
    app = Flask(__name__)

    @app.route("/")
    def index():
        return render_template("index.html", channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    @app.route("/grefs/<channel>")
    def list_grefs(channel):
        return

    return app
... ) db.session.add(user) else: ... user.login = github_user.login db.session.commit() ...
133bddf28eed38273eeb384b152ec35ae861a480
sunpy/__init__.py
sunpy/__init__.py
from __future__ import absolute_import

try:
    from .version import version as __version__
except ImportError:
    __version__ = ''
try:
    from .version import githash as __githash__
except ImportError:
    __githash__ = ''

import os

from sunpy.util.config import load_config, print_config
from sunpy.util import system_info
from sunpy.tests.runner import SunPyTestRunner

self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))

# Load user configuration
config = load_config()

__all__ = ['config', 'self_test', 'system_info']
from __future__ import absolute_import

try:
    from .version import version as __version__
except ImportError:
    __version__ = ''
try:
    from .version import githash as __githash__
except ImportError:
    __githash__ = ''

try:
    _ASTROPY_SETUP_
except NameError:
    _ASTROPY_SETUP_ = False

if not _ASTROPY_SETUP_:
    import os

    from sunpy.util.config import load_config, print_config
    from sunpy.util import system_info
    from sunpy.tests.runner import SunPyTestRunner

    self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))

    # Load user configuration
    config = load_config()

    __all__ = ['config', 'self_test', 'system_info']
Make sure package does not import itself during setup
Make sure package does not import itself during setup
Python
bsd-2-clause
dpshelio/sunpy,dpshelio/sunpy,dpshelio/sunpy
  from __future__ import absolute_import

  try:
      from .version import version as __version__
  except ImportError:
      __version__ = ''
  try:
      from .version import githash as __githash__
  except ImportError:
      __githash__ = ''

- import os
- from sunpy.util.config import load_config, print_config
- from sunpy.util import system_info
- from sunpy.tests.runner import SunPyTestRunner
+ try:
+     _ASTROPY_SETUP_
+ except NameError:
+     _ASTROPY_SETUP_ = False

- self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))
- # Load user configuration
- config = load_config()
+ if not _ASTROPY_SETUP_:
+     import os
+     from sunpy.util.config import load_config, print_config
+     from sunpy.util import system_info
+     from sunpy.tests.runner import SunPyTestRunner

- __all__ = ['config', 'self_test', 'system_info']
+     self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))
+     # Load user configuration
+     config = load_config()
+
+     __all__ = ['config', 'self_test', 'system_info']
Make sure package does not import itself during setup
## Code Before:
from __future__ import absolute_import

try:
    from .version import version as __version__
except ImportError:
    __version__ = ''
try:
    from .version import githash as __githash__
except ImportError:
    __githash__ = ''

import os

from sunpy.util.config import load_config, print_config
from sunpy.util import system_info
from sunpy.tests.runner import SunPyTestRunner

self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))

# Load user configuration
config = load_config()

__all__ = ['config', 'self_test', 'system_info']

## Instruction:
Make sure package does not import itself during setup

## Code After:
from __future__ import absolute_import

try:
    from .version import version as __version__
except ImportError:
    __version__ = ''
try:
    from .version import githash as __githash__
except ImportError:
    __githash__ = ''

try:
    _ASTROPY_SETUP_
except NameError:
    _ASTROPY_SETUP_ = False

if not _ASTROPY_SETUP_:
    import os

    from sunpy.util.config import load_config, print_config
    from sunpy.util import system_info
    from sunpy.tests.runner import SunPyTestRunner

    self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))

    # Load user configuration
    config = load_config()

    __all__ = ['config', 'self_test', 'system_info']
// ... existing code ...
try:
    _ASTROPY_SETUP_
except NameError:
    _ASTROPY_SETUP_ = False

if not _ASTROPY_SETUP_:
    import os

    from sunpy.util.config import load_config, print_config
    from sunpy.util import system_info
    from sunpy.tests.runner import SunPyTestRunner

    self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))

    # Load user configuration
    config = load_config()

    __all__ = ['config', 'self_test', 'system_info']
// ... rest of the code ...
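For context, `_ASTROPY_SETUP_` is a builtin injected by the astropy-helpers machinery while setup.py runs, so the package can skip imports that would fail mid-build. A hedged sketch of the injecting side, which lives in setup.py rather than in this file (an assumption about the build tooling, not shown in the commit):

import builtins  # '__builtin__' on Python 2

# signals to sunpy/__init__.py that we are inside the build process
builtins._ASTROPY_SETUP_ = True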
dfce3efecacfa53654e06ea9be94407155fe7e4c
airship/__init__.py
airship/__init__.py
import os import json from flask import Flask, render_template def channels_json(station, escaped=False): channels = [{"name": channel} for channel in station.channels()] jsonbody = json.dumps(channels) if escaped: jsonbody = jsonbody.replace("</", "<\\/") return jsonbody def make_airship(station): app = Flask(__name__) @app.route("/") def index(): return render_template("index.html", channels_json=channels_json(station, True)) @app.route("/channels") def list_channels(): return channels_json(station) return app
import os import json from flask import Flask, render_template def channels_json(station, escaped=False): channels = [{"name": channel} for channel in station.channels()] jsonbody = json.dumps(channels) if escaped: jsonbody = jsonbody.replace("</", "<\\/") return jsonbody def make_airship(station): app = Flask(__name__) @app.route("/") def index(): return render_template("index.html", channels_json=channels_json(station, True)) @app.route("/channels") def list_channels(): return channels_json(station) @app.route("/grefs/<channel>") def list_grefs(channel): return return app
Create a route for fetching grefs
Create a route for fetching grefs
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
import os import json from flask import Flask, render_template def channels_json(station, escaped=False): channels = [{"name": channel} for channel in station.channels()] jsonbody = json.dumps(channels) if escaped: jsonbody = jsonbody.replace("</", "<\\/") return jsonbody def make_airship(station): app = Flask(__name__) @app.route("/") def index(): return render_template("index.html", channels_json=channels_json(station, True)) @app.route("/channels") def list_channels(): return channels_json(station) + @app.route("/grefs/<channel>") + def list_grefs(channel): + return + + return app
Create a route for fetching grefs
## Code Before:
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init


class TestCase(object):
    added_objects = []
    committed_objects = []
    created_objects = []
    deleted_objects = []

    def setup_method(self, method, resizer=None):
        init(db_mock, MockStorage, resizer)
        self.db = db_mock
        self.Storage = MockStorage
        self.storage = MockStorage()
        self.resizer = resizer

    def teardown_method(self, method):
        # Empty the stacks.
        TestCase.added_objects[:] = []
        TestCase.committed_objects[:] = []
        TestCase.created_objects[:] = []
        TestCase.deleted_objects[:] = []


class MockModel(object):
    def __init__(self, **kw):
        TestCase.created_objects.append(self)
        for key, val in kw.iteritems():
            setattr(self, key, val)


db_mock = flexmock(
    Column=lambda *a, **kw: ('column', a, kw),
    Integer=('integer', [], {}),
    Unicode=lambda *a, **kw: ('unicode', a, kw),
    Model=MockModel,
    session=flexmock(
        add=TestCase.added_objects.append,
        commit=lambda: TestCase.committed_objects.extend(
            TestCase.added_objects + TestCase.deleted_objects
        ),
        delete=TestCase.deleted_objects.append,
    ),
)

## Instruction:
Add metadata.tables to mock db.

## Code After:
from flexmock import flexmock
from flask.ext.storage import MockStorage
from flask_uploads import init


class TestCase(object):
    added_objects = []
    committed_objects = []
    created_objects = []
    deleted_objects = []

    def setup_method(self, method, resizer=None):
        init(db_mock, MockStorage, resizer)
        self.db = db_mock
        self.Storage = MockStorage
        self.storage = MockStorage()
        self.resizer = resizer

    def teardown_method(self, method):
        # Empty the stacks.
        TestCase.added_objects[:] = []
        TestCase.committed_objects[:] = []
        TestCase.created_objects[:] = []
        TestCase.deleted_objects[:] = []


class MockModel(object):
    def __init__(self, **kw):
        TestCase.created_objects.append(self)
        for key, val in kw.iteritems():
            setattr(self, key, val)


db_mock = flexmock(
    Column=lambda *a, **kw: ('column', a, kw),
    Integer=('integer', [], {}),
    Unicode=lambda *a, **kw: ('unicode', a, kw),
    Model=MockModel,
    metadata=flexmock(tables={}),
    session=flexmock(
        add=TestCase.added_objects.append,
        commit=lambda: TestCase.committed_objects.extend(
            TestCase.added_objects + TestCase.deleted_objects
        ),
        delete=TestCase.deleted_objects.append,
    ),
)
... @app.route("/grefs/<channel>") def list_grefs(channel): return return app ...
46c33ca68c1124fb06c4ba62306cb00ba61d7e5c
tests/__init__.py
tests/__init__.py
from flexmock import flexmock from flask.ext.storage import MockStorage from flask_uploads import init class TestCase(object): added_objects = [] committed_objects = [] created_objects = [] deleted_objects = [] def setup_method(self, method, resizer=None): init(db_mock, MockStorage, resizer) self.db = db_mock self.Storage = MockStorage self.storage = MockStorage() self.resizer = resizer def teardown_method(self, method): # Empty the stacks. TestCase.added_objects[:] = [] TestCase.committed_objects[:] = [] TestCase.created_objects[:] = [] TestCase.deleted_objects[:] = [] class MockModel(object): def __init__(self, **kw): TestCase.created_objects.append(self) for key, val in kw.iteritems(): setattr(self, key, val) db_mock = flexmock( Column=lambda *a, **kw: ('column', a, kw), Integer=('integer', [], {}), Unicode=lambda *a, **kw: ('unicode', a, kw), Model=MockModel, session=flexmock( add=TestCase.added_objects.append, commit=lambda: TestCase.committed_objects.extend( TestCase.added_objects + TestCase.deleted_objects ), delete=TestCase.deleted_objects.append, ), )
from flexmock import flexmock from flask.ext.storage import MockStorage from flask_uploads import init class TestCase(object): added_objects = [] committed_objects = [] created_objects = [] deleted_objects = [] def setup_method(self, method, resizer=None): init(db_mock, MockStorage, resizer) self.db = db_mock self.Storage = MockStorage self.storage = MockStorage() self.resizer = resizer def teardown_method(self, method): # Empty the stacks. TestCase.added_objects[:] = [] TestCase.committed_objects[:] = [] TestCase.created_objects[:] = [] TestCase.deleted_objects[:] = [] class MockModel(object): def __init__(self, **kw): TestCase.created_objects.append(self) for key, val in kw.iteritems(): setattr(self, key, val) db_mock = flexmock( Column=lambda *a, **kw: ('column', a, kw), Integer=('integer', [], {}), Unicode=lambda *a, **kw: ('unicode', a, kw), Model=MockModel, metadata=flexmock(tables={}), session=flexmock( add=TestCase.added_objects.append, commit=lambda: TestCase.committed_objects.extend( TestCase.added_objects + TestCase.deleted_objects ), delete=TestCase.deleted_objects.append, ), )
Add metadata.tables to mock db.
Add metadata.tables to mock db.
Python
mit
FelixLoether/flask-uploads,FelixLoether/flask-image-upload-thing
from flexmock import flexmock from flask.ext.storage import MockStorage from flask_uploads import init class TestCase(object): added_objects = [] committed_objects = [] created_objects = [] deleted_objects = [] def setup_method(self, method, resizer=None): init(db_mock, MockStorage, resizer) self.db = db_mock self.Storage = MockStorage self.storage = MockStorage() self.resizer = resizer def teardown_method(self, method): # Empty the stacks. TestCase.added_objects[:] = [] TestCase.committed_objects[:] = [] TestCase.created_objects[:] = [] TestCase.deleted_objects[:] = [] class MockModel(object): def __init__(self, **kw): TestCase.created_objects.append(self) for key, val in kw.iteritems(): setattr(self, key, val) db_mock = flexmock( Column=lambda *a, **kw: ('column', a, kw), Integer=('integer', [], {}), Unicode=lambda *a, **kw: ('unicode', a, kw), Model=MockModel, + metadata=flexmock(tables={}), session=flexmock( add=TestCase.added_objects.append, commit=lambda: TestCase.committed_objects.extend( TestCase.added_objects + TestCase.deleted_objects ), delete=TestCase.deleted_objects.append, ), )
Add metadata.tables to mock db.
## Code Before:
"""PyCrypto RSA implementation."""

from .cryptomath import *

from .rsakey import *
from .python_rsakey import Python_RSAKey

if pycryptoLoaded:

    from Crypto.PublicKey import RSA

    class PyCrypto_RSAKey(RSAKey):
        def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
            if not d:
                self.rsa = RSA.construct( (n, e) )
            else:
                self.rsa = RSA.construct( (n, e, d, p, q) )

        def __getattr__(self, name):
            return getattr(self.rsa, name)

        def hasPrivateKey(self):
            return self.rsa.has_private()

        def _rawPrivateKeyOp(self, m):
            s = numberToString(m, numBytes(self.n))
            c = stringToNumber(self.rsa.decrypt((s,)))
            return c

        def _rawPublicKeyOp(self, c):
            s = numberToString(c, numBytes(self.n))
            m = stringToNumber(self.rsa.encrypt(s, None)[0])
            return m

        def generate(bits):
            key = PyCrypto_RSAKey()
            def f(numBytes):
                return bytes(getRandomBytes(numBytes))
            key.rsa = RSA.generate(bits, f)
            return key

        generate = staticmethod(generate)

## Instruction:
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)

## Code After:
"""PyCrypto RSA implementation."""

from cryptomath import *

from .rsakey import *
from .python_rsakey import Python_RSAKey

if pycryptoLoaded:

    from Crypto.PublicKey import RSA

    class PyCrypto_RSAKey(RSAKey):
        def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
            if not d:
                self.rsa = RSA.construct( (long(n), long(e)) )
            else:
                self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) )

        def __getattr__(self, name):
            return getattr(self.rsa, name)

        def hasPrivateKey(self):
            return self.rsa.has_private()

        def _rawPrivateKeyOp(self, m):
            c = self.rsa.decrypt((m,))
            return c

        def _rawPublicKeyOp(self, c):
            m = self.rsa.encrypt(c, None)[0]
            return m

        def generate(bits):
            key = PyCrypto_RSAKey()
            def f(numBytes):
                return bytes(getRandomBytes(numBytes))
            key.rsa = RSA.generate(bits, f)
            return key

        generate = staticmethod(generate)
# ... existing code ... Model=MockModel, metadata=flexmock(tables={}), session=flexmock( # ... rest of the code ...
f531cfa07ba6e6e0d36ba768dbeb4706ae7cd259
tlslite/utils/pycrypto_rsakey.py
tlslite/utils/pycrypto_rsakey.py
"""PyCrypto RSA implementation.""" from .cryptomath import * from .rsakey import * from .python_rsakey import Python_RSAKey if pycryptoLoaded: from Crypto.PublicKey import RSA class PyCrypto_RSAKey(RSAKey): def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0): if not d: self.rsa = RSA.construct( (n, e) ) else: self.rsa = RSA.construct( (n, e, d, p, q) ) def __getattr__(self, name): return getattr(self.rsa, name) def hasPrivateKey(self): return self.rsa.has_private() def _rawPrivateKeyOp(self, m): s = numberToString(m, numBytes(self.n)) c = stringToNumber(self.rsa.decrypt((s,))) return c def _rawPublicKeyOp(self, c): s = numberToString(c, numBytes(self.n)) m = stringToNumber(self.rsa.encrypt(s, None)[0]) return m def generate(bits): key = PyCrypto_RSAKey() def f(numBytes): return bytes(getRandomBytes(numBytes)) key.rsa = RSA.generate(bits, f) return key generate = staticmethod(generate)
"""PyCrypto RSA implementation.""" from cryptomath import * from .rsakey import * from .python_rsakey import Python_RSAKey if pycryptoLoaded: from Crypto.PublicKey import RSA class PyCrypto_RSAKey(RSAKey): def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0): if not d: self.rsa = RSA.construct( (long(n), long(e)) ) else: self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) ) def __getattr__(self, name): return getattr(self.rsa, name) def hasPrivateKey(self): return self.rsa.has_private() def _rawPrivateKeyOp(self, m): c = self.rsa.decrypt((m,)) return c def _rawPublicKeyOp(self, c): m = self.rsa.encrypt(c, None)[0] return m def generate(bits): key = PyCrypto_RSAKey() def f(numBytes): return bytes(getRandomBytes(numBytes)) key.rsa = RSA.generate(bits, f) return key generate = staticmethod(generate)
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)
Python
lgpl-2.1
ioef/tlslite-ng,ioef/tlslite-ng,ioef/tlslite-ng
"""PyCrypto RSA implementation.""" - from .cryptomath import * + from cryptomath import * from .rsakey import * from .python_rsakey import Python_RSAKey if pycryptoLoaded: from Crypto.PublicKey import RSA class PyCrypto_RSAKey(RSAKey): def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0): if not d: - self.rsa = RSA.construct( (n, e) ) + self.rsa = RSA.construct( (long(n), long(e)) ) else: - self.rsa = RSA.construct( (n, e, d, p, q) ) + self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) ) def __getattr__(self, name): return getattr(self.rsa, name) def hasPrivateKey(self): return self.rsa.has_private() def _rawPrivateKeyOp(self, m): - s = numberToString(m, numBytes(self.n)) - c = stringToNumber(self.rsa.decrypt((s,))) + c = self.rsa.decrypt((m,)) return c def _rawPublicKeyOp(self, c): - s = numberToString(c, numBytes(self.n)) - m = stringToNumber(self.rsa.encrypt(s, None)[0]) + m = self.rsa.encrypt(c, None)[0] return m def generate(bits): key = PyCrypto_RSAKey() def f(numBytes): return bytes(getRandomBytes(numBytes)) key.rsa = RSA.generate(bits, f) return key generate = staticmethod(generate)
Remove numberToString/stringToNumber in pycrypto support package and add some int to long conversions so it can happily pass the tests (I bet this is enough to get it working)
## Code Before:
from datetime import timedelta
from datetime import datetime

from django import template

from github2.client import Github
from package.models import Package, Commit

register = template.Library()

github = Github()


@register.filter
def commits_over_52(package):
    current = datetime.now()
    weeks = []
    commits = [x.commit_date for x in Commit.objects.filter(package=package)]
    for week in range(52):
        weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
        current -= timedelta(7)
    weeks.reverse()
    weeks = [str(x) for x in weeks]
    return ','.join(weeks)


@register.inclusion_tag('package/templatetags/usage.html')
def usage(user, package):
    using = package.usage.filter(username=user) or False
    count = 0
    if using:
        count = package.usage.count() - 1
    return {
        "using": using,
        "count": count,
        "package_id": package.id,
        "user_id": user.id,
        "show_count": True
    }


@register.inclusion_tag('package/templatetags/usage.html')
def usage_no_count(user, package):
    using = package.usage.filter(username=user) or False
    count = 0
    return {
        "using": using,
        "count": count,
        "package_id": package.id,
        "user_id": user.id,
        "show_count": False
    }

## Instruction:
Update the commit_over_52 template tag to be more efficient.

## Code After:
from datetime import timedelta
from datetime import datetime

from django import template

from github2.client import Github
from package.models import Package, Commit

register = template.Library()

github = Github()


@register.filter
def commits_over_52(package):
    current = datetime.now()
    weeks = []
    commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
    for week in range(52):
        weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
        current -= timedelta(7)
    weeks.reverse()
    weeks = map(str, weeks)
    return ','.join(weeks)


@register.inclusion_tag('package/templatetags/usage.html')
def usage(user, package):
    using = package.usage.filter(username=user) or False
    count = 0
    if using:
        count = package.usage.count() - 1
    return {
        "using": using,
        "count": count,
        "package_id": package.id,
        "user_id": user.id,
        "show_count": True
    }


@register.inclusion_tag('package/templatetags/usage.html')
def usage_no_count(user, package):
    using = package.usage.filter(username=user) or False
    count = 0
    return {
        "using": using,
        "count": count,
        "package_id": package.id,
        "user_id": user.id,
        "show_count": False
    }
... from cryptomath import * ... if not d: self.rsa = RSA.construct( (long(n), long(e)) ) else: self.rsa = RSA.construct( (long(n), long(e), long(d), long(p), long(q)) ) ... def _rawPrivateKeyOp(self, m): c = self.rsa.decrypt((m,)) return c ... def _rawPublicKeyOp(self, c): m = self.rsa.encrypt(c, None)[0] return m ...
7f817802445bcfea9730f29a82c87f4883fda71e
apps/package/templatetags/package_tags.py
apps/package/templatetags/package_tags.py
from datetime import timedelta from datetime import datetime from django import template from github2.client import Github from package.models import Package, Commit register = template.Library() github = Github() @register.filter def commits_over_52(package): current = datetime.now() weeks = [] commits = [x.commit_date for x in Commit.objects.filter(package=package)] for week in range(52): weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))])) current -= timedelta(7) weeks.reverse() weeks = [str(x) for x in weeks] return ','.join(weeks) @register.inclusion_tag('package/templatetags/usage.html') def usage(user, package): using = package.usage.filter(username=user) or False count = 0 if using: count = package.usage.count() - 1 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": True } @register.inclusion_tag('package/templatetags/usage.html') def usage_no_count(user, package): using = package.usage.filter(username=user) or False count = 0 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": False }
from datetime import timedelta from datetime import datetime from django import template from github2.client import Github from package.models import Package, Commit register = template.Library() github = Github() @register.filter def commits_over_52(package): current = datetime.now() weeks = [] commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True) for week in range(52): weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))])) current -= timedelta(7) weeks.reverse() weeks = map(str, weeks) return ','.join(weeks) @register.inclusion_tag('package/templatetags/usage.html') def usage(user, package): using = package.usage.filter(username=user) or False count = 0 if using: count = package.usage.count() - 1 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": True } @register.inclusion_tag('package/templatetags/usage.html') def usage_no_count(user, package): using = package.usage.filter(username=user) or False count = 0 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": False }
Update the commit_over_52 template tag to be more efficient.
Update the commit_over_52 template tag to be more efficient. Replaced several list comprehensions with in-database operations and map calls for significantly improved performance.
Python
mit
miketheman/opencomparison,miketheman/opencomparison,benracine/opencomparison,audreyr/opencomparison,nanuxbe/djangopackages,cartwheelweb/packaginator,pydanny/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,cartwheelweb/packaginator,pydanny/djangopackages,QLGu/djangopackages,audreyr/opencomparison,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,cartwheelweb/packaginator,benracine/opencomparison
from datetime import timedelta from datetime import datetime from django import template from github2.client import Github from package.models import Package, Commit register = template.Library() github = Github() @register.filter def commits_over_52(package): current = datetime.now() weeks = [] - commits = [x.commit_date for x in Commit.objects.filter(package=package)] + commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True) for week in range(52): weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))])) current -= timedelta(7) weeks.reverse() - weeks = [str(x) for x in weeks] + weeks = map(str, weeks) return ','.join(weeks) @register.inclusion_tag('package/templatetags/usage.html') def usage(user, package): using = package.usage.filter(username=user) or False count = 0 if using: count = package.usage.count() - 1 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": True } @register.inclusion_tag('package/templatetags/usage.html') def usage_no_count(user, package): using = package.usage.filter(username=user) or False count = 0 return { "using": using, "count": count, "package_id": package.id, "user_id": user.id, "show_count": False }
Update the commit_over_52 template tag to be more efficient.
## Code Before:
from mongorm.database import Database
from mongorm.document import Field, Index
from mongorm.utils import DotDict, JSONEncoder


class ValidationError(Exception):
    pass


__all__ = [
    'VERSION',
    'ValidationError',
    'Database',
    'Field',
    'Index',
    'DotDict',
    'JSONEncoder'
]

## Instruction:
Remove VERSION that prevented import *.

## Code After:
from mongorm.database import Database
from mongorm.document import Field, Index
from mongorm.utils import DotDict, JSONEncoder


class ValidationError(Exception):
    pass


__all__ = [
    'ValidationError',
    'Database',
    'Field',
    'Index',
    'DotDict',
    'JSONEncoder'
]
// ... existing code ... weeks = [] commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True) for week in range(52): // ... modified code ... weeks.reverse() weeks = map(str, weeks) return ','.join(weeks) // ... rest of the code ...
3f64d95cae68548cbb0d5a200247b3f7d6c3ccf4
mongorm/__init__.py
mongorm/__init__.py
from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'VERSION', 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
Remove VERSION that prevented import *.
Remove VERSION that prevented import *.
Python
bsd-2-clause
rahulg/mongorm
from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ - 'VERSION', 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
Remove VERSION that prevented import *.
## Code Before: from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'VERSION', 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ] ## Instruction: Remove VERSION that prevented import *. ## Code After: from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
// ... existing code ... __all__ = [ 'ValidationError', // ... rest of the code ...
6e3ddfc47487a8841a79d6265c96ba63005fccec
bnw_handlers/command_onoff.py
bnw_handlers/command_onoff.py
from base import * import random import bnw_core.bnw_objects as objs @require_auth @defer.inlineCallbacks def cmd_on(request): """ Включение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) if request.user['off']: defer.returnValue( dict(ok=True,desc='Welcome back!') ) else: defer.returnValue( dict(ok=True,desc='Welcoooome baaaack, I said.') ) @require_auth @defer.inlineCallbacks def cmd_off(request): """ Выключение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) if request.user['off']: defer.returnValue( dict(ok=True,desc='See you later.') ) else: defer.returnValue( dict(ok=True,desc='C u l8r!') )
from base import * import random import bnw_core.bnw_objects as objs @require_auth @defer.inlineCallbacks def cmd_on(request): """ Включение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='Welcome back!') ) else: defer.returnValue( dict(ok=True,desc='Welcoooome baaaack, I said.') ) @require_auth @defer.inlineCallbacks def cmd_off(request): """ Выключение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='See you later.') ) else: defer.returnValue( dict(ok=True,desc='C u l8r!') )
Fix on/off if there is no 'off' field.
Fix on/off if there is no 'off' field.
Python
bsd-2-clause
un-def/bnw,stiletto/bnw,un-def/bnw,stiletto/bnw,ojab/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw
from base import * import random import bnw_core.bnw_objects as objs @require_auth @defer.inlineCallbacks def cmd_on(request): """ Включение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) - if request.user['off']: + if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='Welcome back!') ) else: defer.returnValue( dict(ok=True,desc='Welcoooome baaaack, I said.') ) @require_auth @defer.inlineCallbacks def cmd_off(request): """ Выключение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) - if request.user['off']: + if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='See you later.') ) else: defer.returnValue( dict(ok=True,desc='C u l8r!') )
Fix on/off if there is no 'off' field.
## Code Before: from base import * import random import bnw_core.bnw_objects as objs @require_auth @defer.inlineCallbacks def cmd_on(request): """ Включение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) if request.user['off']: defer.returnValue( dict(ok=True,desc='Welcome back!') ) else: defer.returnValue( dict(ok=True,desc='Welcoooome baaaack, I said.') ) @require_auth @defer.inlineCallbacks def cmd_off(request): """ Выключение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) if request.user['off']: defer.returnValue( dict(ok=True,desc='See you later.') ) else: defer.returnValue( dict(ok=True,desc='C u l8r!') ) ## Instruction: Fix on/off if there is no 'off' field. ## Code After: from base import * import random import bnw_core.bnw_objects as objs @require_auth @defer.inlineCallbacks def cmd_on(request): """ Включение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='Welcome back!') ) else: defer.returnValue( dict(ok=True,desc='Welcoooome baaaack, I said.') ) @require_auth @defer.inlineCallbacks def cmd_off(request): """ Выключение доставки сообщений """ _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) if request.user.get('off',False): defer.returnValue( dict(ok=True,desc='See you later.') ) else: defer.returnValue( dict(ok=True,desc='C u l8r!') )
// ... existing code ... _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True) if request.user.get('off',False): defer.returnValue( // ... modified code ... _ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True) if request.user.get('off',False): defer.returnValue( // ... rest of the code ...
dd50858ee22c27076919614d1994e3ce9c8e2399
soundem/handlers.py
soundem/handlers.py
from flask import jsonify from soundem import app def json_error_handler(e): return jsonify({ 'status_code': e.code, 'error': 'Bad Request', 'detail': e.description }), e.code @app.errorhandler(400) def bad_request_handler(e): return json_error_handler(e) @app.errorhandler(401) def unauthorized_handler(e): return json_error_handler(e) @app.errorhandler(404) def not_found_handler(e): return json_error_handler(e) @app.errorhandler(405) def method_not_allowed_handler(e): return json_error_handler(e)
from flask import jsonify from soundem import app def json_error_handler(e): return jsonify({ 'status_code': e.code, 'error': e.name, 'detail': e.description }), e.code @app.errorhandler(400) def bad_request_handler(e): return json_error_handler(e) @app.errorhandler(401) def unauthorized_handler(e): return json_error_handler(e) @app.errorhandler(404) def not_found_handler(e): return json_error_handler(e) @app.errorhandler(405) def method_not_allowed_handler(e): return json_error_handler(e)
Fix json error handler name
Fix json error handler name
Python
mit
building4theweb/soundem-api
from flask import jsonify from soundem import app def json_error_handler(e): return jsonify({ 'status_code': e.code, - 'error': 'Bad Request', + 'error': e.name, 'detail': e.description }), e.code @app.errorhandler(400) def bad_request_handler(e): return json_error_handler(e) @app.errorhandler(401) def unauthorized_handler(e): return json_error_handler(e) @app.errorhandler(404) def not_found_handler(e): return json_error_handler(e) @app.errorhandler(405) def method_not_allowed_handler(e): return json_error_handler(e)
Fix json error handler name
## Code Before: from flask import jsonify from soundem import app def json_error_handler(e): return jsonify({ 'status_code': e.code, 'error': 'Bad Request', 'detail': e.description }), e.code @app.errorhandler(400) def bad_request_handler(e): return json_error_handler(e) @app.errorhandler(401) def unauthorized_handler(e): return json_error_handler(e) @app.errorhandler(404) def not_found_handler(e): return json_error_handler(e) @app.errorhandler(405) def method_not_allowed_handler(e): return json_error_handler(e) ## Instruction: Fix json error handler name ## Code After: from flask import jsonify from soundem import app def json_error_handler(e): return jsonify({ 'status_code': e.code, 'error': e.name, 'detail': e.description }), e.code @app.errorhandler(400) def bad_request_handler(e): return json_error_handler(e) @app.errorhandler(401) def unauthorized_handler(e): return json_error_handler(e) @app.errorhandler(404) def not_found_handler(e): return json_error_handler(e) @app.errorhandler(405) def method_not_allowed_handler(e): return json_error_handler(e)
# ... existing code ... 'status_code': e.code, 'error': e.name, 'detail': e.description # ... rest of the code ...
9fba9934c9b47881ee468f295a3710f2c184fab1
tendrl/node_agent/__init__.py
tendrl/node_agent/__init__.py
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS()
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
Fix greenlet and essential objects startup order
Fix greenlet and essential objects startup order
Python
lgpl-2.1
Tendrl/node_agent,Tendrl/node-agent,r0h4n/node-agent,Tendrl/node_agent,Tendrl/node-agent,Tendrl/node-agent,r0h4n/node-agent,r0h4n/node-agent
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() + NodeAgentNS() - import __builtin__ - __builtin__.tendrl_ns = NodeAgentNS()
Fix greenlet and essential objects startup order
## Code Before: try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS() ## Instruction: Fix greenlet and essential objects startup order ## Code After: try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
// ... existing code ... NodeAgentNS() // ... rest of the code ...
02ab421105754e6ec258bc7c48b794bcb8ad95ec
HOME/.ipython/profile_default/ipython_config.py
HOME/.ipython/profile_default/ipython_config.py
c.TerminalIPythonApp.display_banner = False c.TerminalInteractiveShell.confirm_exit = False c.TerminalInteractiveShell.highlighting_style = "monokai" c.TerminalInteractiveShell.term_title = False
c.TerminalIPythonApp.display_banner = False c.TerminalInteractiveShell.confirm_exit = False c.TerminalInteractiveShell.highlighting_style = "monokai" c.TerminalInteractiveShell.term_title = False import logging logging.getLogger('parso').level = logging.WARN
Fix spammy logging during IPython tab complete
Fix spammy logging during IPython tab complete https://github.com/ipython/ipython/issues/10946
Python
mit
kbd/setup,kbd/setup,kbd/setup,kbd/setup,kbd/setup
c.TerminalIPythonApp.display_banner = False c.TerminalInteractiveShell.confirm_exit = False c.TerminalInteractiveShell.highlighting_style = "monokai" c.TerminalInteractiveShell.term_title = False + import logging + logging.getLogger('parso').level = logging.WARN
Fix spammy logging during IPython tab complete
## Code Before: c.TerminalIPythonApp.display_banner = False c.TerminalInteractiveShell.confirm_exit = False c.TerminalInteractiveShell.highlighting_style = "monokai" c.TerminalInteractiveShell.term_title = False ## Instruction: Fix spammy logging during IPython tab complete ## Code After: c.TerminalIPythonApp.display_banner = False c.TerminalInteractiveShell.confirm_exit = False c.TerminalInteractiveShell.highlighting_style = "monokai" c.TerminalInteractiveShell.term_title = False import logging logging.getLogger('parso').level = logging.WARN
# ... existing code ... c.TerminalInteractiveShell.term_title = False import logging logging.getLogger('parso').level = logging.WARN # ... rest of the code ...
ddaf1a2b6744c9012546d6258b0378ab1b96d658
zerver/lib/i18n.py
zerver/lib/i18n.py
from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) if 'percent_translated' not in lang_info: lang_info['percent_translated'] = 'N/A' lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes
from __future__ import absolute_import from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes
Return unformatted list from get_language_list.
Return unformatted list from get_language_list.
Python
apache-2.0
grave-w-grave/zulip,amanharitsh123/zulip,kou/zulip,vikas-parashar/zulip,JPJPJPOPOP/zulip,ahmadassaf/zulip,andersk/zulip,vikas-parashar/zulip,andersk/zulip,hackerkid/zulip,kou/zulip,eeshangarg/zulip,ahmadassaf/zulip,dhcrzf/zulip,showell/zulip,punchagan/zulip,vabs22/zulip,peguin40/zulip,JPJPJPOPOP/zulip,shubhamdhama/zulip,joyhchen/zulip,jainayush975/zulip,verma-varsha/zulip,joyhchen/zulip,jphilipsen05/zulip,christi3k/zulip,sup95/zulip,amanharitsh123/zulip,mahim97/zulip,rishig/zulip,ahmadassaf/zulip,umkay/zulip,andersk/zulip,joyhchen/zulip,jainayush975/zulip,susansls/zulip,rht/zulip,showell/zulip,j831/zulip,synicalsyntax/zulip,cosmicAsymmetry/zulip,arpith/zulip,jainayush975/zulip,umkay/zulip,joyhchen/zulip,jrowan/zulip,rishig/zulip,peguin40/zulip,sup95/zulip,christi3k/zulip,KingxBanana/zulip,AZtheAsian/zulip,mahim97/zulip,jphilipsen05/zulip,SmartPeople/zulip,blaze225/zulip,hackerkid/zulip,brockwhittaker/zulip,dattatreya303/zulip,Juanvulcano/zulip,samatdav/zulip,isht3/zulip,punchagan/zulip,PhilSk/zulip,joyhchen/zulip,jrowan/zulip,vikas-parashar/zulip,susansls/zulip,punchagan/zulip,dawran6/zulip,Jianchun1/zulip,hackerkid/zulip,brockwhittaker/zulip,dattatreya303/zulip,Juanvulcano/zulip,andersk/zulip,susansls/zulip,shubhamdhama/zulip,jainayush975/zulip,Juanvulcano/zulip,verma-varsha/zulip,jackrzhang/zulip,KingxBanana/zulip,jackrzhang/zulip,vikas-parashar/zulip,umkay/zulip,j831/zulip,susansls/zulip,vaidap/zulip,rishig/zulip,peguin40/zulip,vikas-parashar/zulip,zulip/zulip,calvinleenyc/zulip,showell/zulip,jrowan/zulip,rishig/zulip,synicalsyntax/zulip,jphilipsen05/zulip,amanharitsh123/zulip,AZtheAsian/zulip,brainwane/zulip,calvinleenyc/zulip,blaze225/zulip,cosmicAsymmetry/zulip,dawran6/zulip,synicalsyntax/zulip,Jianchun1/zulip,isht3/zulip,AZtheAsian/zulip,sonali0901/zulip,arpith/zulip,JPJPJPOPOP/zulip,souravbadami/zulip,shubhamdhama/zulip,verma-varsha/zulip,peguin40/zulip,grave-w-grave/zulip,niftynei/zulip,JPJPJPOPOP/zulip,vabs22/zulip,jackrzhang/zulip,isht3/zulip,sup95/zulip,jrowan/zulip,jackrzhang/zulip,paxapy/zulip,paxapy/zulip,Diptanshu8/zulip,blaze225/zulip,krtkmj/zulip,ryanbackman/zulip,shubhamdhama/zulip,blaze225/zulip,rht/zulip,rishig/zulip,timabbott/zulip,verma-varsha/zulip,grave-w-grave/zulip,umkay/zulip,aakash-cr7/zulip,jackrzhang/zulip,sharmaeklavya2/zulip,cosmicAsymmetry/zulip,blaze225/zulip,dhcrzf/zulip,SmartPeople/zulip,zulip/zulip,dhcrzf/zulip,krtkmj/zulip,brainwane/zulip,souravbadami/zulip,eeshangarg/zulip,punchagan/zulip,krtkmj/zulip,calvinleenyc/zulip,Jianchun1/zulip,aakash-cr7/zulip,brockwhittaker/zulip,jackrzhang/zulip,TigorC/zulip,niftynei/zulip,zulip/zulip,umkay/zulip,dawran6/zulip,punchagan/zulip,j831/zulip,synicalsyntax/zulip,amyliu345/zulip,christi3k/zulip,j831/zulip,brockwhittaker/zulip,peguin40/zulip,kou/zulip,PhilSk/zulip,peguin40/zulip,amyliu345/zulip,brainwane/zulip,timabbott/zulip,isht3/zulip,souravbadami/zulip,brainwane/zulip,amyliu345/zulip,Galexrt/zulip,ryanbackman/zulip,rht/zulip,grave-w-grave/zulip,Jianchun1/zulip,reyha/zulip,rishig/zulip,PhilSk/zulip,synicalsyntax/zulip,samatdav/zulip,JPJPJPOPOP/zulip,Jianchun1/zulip,sonali0901/zulip,ahmadassaf/zulip,eeshangarg/zulip,SmartPeople/zulip,susansls/zulip,amyliu345/zulip,zulip/zulip,reyha/zulip,rishig/zulip,synicalsyntax/zulip,jphilipsen05/zulip,mohsenSy/zulip,jainayush975/zulip,timabbott/zulip,mohsenSy/zulip,brainwane/zulip,tommyip/zulip,KingxBanana/zulip,zacps/zulip,Galexrt/zulip,joyhchen/zulip,mahim97/zulip,KingxBanana/zulip,synicalsyntax/zulip,zulip/zulip,mahim97/zulip,krtkmj/zulip,reyha/zulip,AZtheAsian/zulip,aakash-cr7/zulip,jrowan/zulip,calvinleenyc/zulip,tommyip/zulip,samatdav/zulip,rht/zulip,zulip/zulip,christi3k/zulip,dawran6/zulip,isht3/zulip,vabs22/zulip,souravbadami/zulip,Diptanshu8/zulip,TigorC/zulip,sonali0901/zulip,showell/zulip,eeshangarg/zulip,amanharitsh123/zulip,amanharitsh123/zulip,mahim97/zulip,dhcrzf/zulip,andersk/zulip,mohsenSy/zulip,niftynei/zulip,cosmicAsymmetry/zulip,mohsenSy/zulip,verma-varsha/zulip,ahmadassaf/zulip,timabbott/zulip,hackerkid/zulip,PhilSk/zulip,zacps/zulip,hackerkid/zulip,rht/zulip,Galexrt/zulip,tommyip/zulip,jackrzhang/zulip,brainwane/zulip,rishig/zulip,kou/zulip,krtkmj/zulip,Galexrt/zulip,dattatreya303/zulip,timabbott/zulip,ryanbackman/zulip,sup95/zulip,j831/zulip,zulip/zulip,kou/zulip,TigorC/zulip,timabbott/zulip,j831/zulip,amanharitsh123/zulip,KingxBanana/zulip,timabbott/zulip,vabs22/zulip,shubhamdhama/zulip,shubhamdhama/zulip,vaidap/zulip,AZtheAsian/zulip,verma-varsha/zulip,dattatreya303/zulip,mahim97/zulip,grave-w-grave/zulip,tommyip/zulip,zacps/zulip,SmartPeople/zulip,JPJPJPOPOP/zulip,mohsenSy/zulip,sonali0901/zulip,eeshangarg/zulip,brainwane/zulip,reyha/zulip,Jianchun1/zulip,sonali0901/zulip,eeshangarg/zulip,sharmaeklavya2/zulip,vaidap/zulip,peguin40/zulip,KingxBanana/zulip,andersk/zulip,dawran6/zulip,brockwhittaker/zulip,umkay/zulip,calvinleenyc/zulip,Juanvulcano/zulip,reyha/zulip,ahmadassaf/zulip,grave-w-grave/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,arpith/zulip,samatdav/zulip,Diptanshu8/zulip,punchagan/zulip,zacps/zulip,vabs22/zulip,ryanbackman/zulip,mohsenSy/zulip,dhcrzf/zulip,niftynei/zulip,shubhamdhama/zulip,amyliu345/zulip,jphilipsen05/zulip,vikas-parashar/zulip,SmartPeople/zulip,sup95/zulip,susansls/zulip,TigorC/zulip,kou/zulip,showell/zulip,arpith/zulip,PhilSk/zulip,tommyip/zulip,christi3k/zulip,jrowan/zulip,niftynei/zulip,paxapy/zulip,paxapy/zulip,Galexrt/zulip,hackerkid/zulip,zacps/zulip,krtkmj/zulip,vabs22/zulip,reyha/zulip,Juanvulcano/zulip,aakash-cr7/zulip,vaidap/zulip,Galexrt/zulip,sharmaeklavya2/zulip,vaidap/zulip,PhilSk/zulip,aakash-cr7/zulip,aakash-cr7/zulip,dattatreya303/zulip,umkay/zulip,ryanbackman/zulip,amyliu345/zulip,rht/zulip,isht3/zulip,Diptanshu8/zulip
+ from __future__ import absolute_import + from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) - if 'percent_translated' not in lang_info: - lang_info['percent_translated'] = 'N/A' lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes +
Return unformatted list from get_language_list.
## Code Before: from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) if 'percent_translated' not in lang_info: lang_info['percent_translated'] = 'N/A' lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes ## Instruction: Return unformatted list from get_language_list. ## Code After: from __future__ import absolute_import from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes
# ... existing code ... from __future__ import absolute_import from django.conf import settings # ... modified code ... lang_info['name'] = with_language(name, lang_info['code']) lang_list.append(lang_info) ... return codes # ... rest of the code ...
8eb47d151868c8e5906af054749993cd46a73b2d
capstone/player/kerasplayer.py
capstone/player/kerasplayer.py
from keras.models import load_model from . import Player from ..utils import normalize_board, utility class KerasPlayer(Player): ''' Takes moves based on a Keras neural network model. ''' name = 'Keras' def __init__(self, filepath): self.model = load_model(filepath) def __str__(self): return self.name def __repr__(self): return self.name ########## # Player # ########## def choose_move(self, state): assert state.cur_player() == 0 best_action = None best_value = -1000000 for action in state.legal_moves(): s = state.copy() s = s.make_move(action) value = self.model.predict(normalize_board(s.board), batch_size=1) assert value >= -1.0 and value <= 1.0 if value > best_value: best_action = action best_value = value return best_action
from keras.models import load_model from . import Player from ..utils import normalize_board, utility class KerasPlayer(Player): ''' Takes moves based on a Keras neural network model. ''' name = 'Keras' def __init__(self, filepath): self.model = load_model(filepath) def __str__(self): return self.name def __repr__(self): return self.name ########## # Player # ########## def choose_move(self, game): assert game.cur_player() == 0 best_move = None best_value = -1000000 for move in game.legal_moves(): next_game = game.copy().make_move(move) value = self.model.predict(normalize_board(next_game.board), batch_size=1) assert value >= -1.0 and value <= 1.0 if value > best_value: best_move = move best_value = value return best_move
Rename state to game in KerasPlayer
Rename state to game in KerasPlayer
Python
mit
davidrobles/mlnd-capstone-code
from keras.models import load_model from . import Player from ..utils import normalize_board, utility class KerasPlayer(Player): ''' Takes moves based on a Keras neural network model. ''' name = 'Keras' def __init__(self, filepath): self.model = load_model(filepath) def __str__(self): return self.name def __repr__(self): return self.name ########## # Player # ########## - def choose_move(self, state): + def choose_move(self, game): - assert state.cur_player() == 0 + assert game.cur_player() == 0 - best_action = None + best_move = None best_value = -1000000 - for action in state.legal_moves(): + for move in game.legal_moves(): + next_game = game.copy().make_move(move) - s = state.copy() - s = s.make_move(action) - value = self.model.predict(normalize_board(s.board), batch_size=1) + value = self.model.predict(normalize_board(next_game.board), batch_size=1) assert value >= -1.0 and value <= 1.0 if value > best_value: - best_action = action + best_move = move best_value = value - return best_action + return best_move
Rename state to game in KerasPlayer
## Code Before: from keras.models import load_model from . import Player from ..utils import normalize_board, utility class KerasPlayer(Player): ''' Takes moves based on a Keras neural network model. ''' name = 'Keras' def __init__(self, filepath): self.model = load_model(filepath) def __str__(self): return self.name def __repr__(self): return self.name ########## # Player # ########## def choose_move(self, state): assert state.cur_player() == 0 best_action = None best_value = -1000000 for action in state.legal_moves(): s = state.copy() s = s.make_move(action) value = self.model.predict(normalize_board(s.board), batch_size=1) assert value >= -1.0 and value <= 1.0 if value > best_value: best_action = action best_value = value return best_action ## Instruction: Rename state to game in KerasPlayer ## Code After: from keras.models import load_model from . import Player from ..utils import normalize_board, utility class KerasPlayer(Player): ''' Takes moves based on a Keras neural network model. ''' name = 'Keras' def __init__(self, filepath): self.model = load_model(filepath) def __str__(self): return self.name def __repr__(self): return self.name ########## # Player # ########## def choose_move(self, game): assert game.cur_player() == 0 best_move = None best_value = -1000000 for move in game.legal_moves(): next_game = game.copy().make_move(move) value = self.model.predict(normalize_board(next_game.board), batch_size=1) assert value >= -1.0 and value <= 1.0 if value > best_value: best_move = move best_value = value return best_move
// ... existing code ... def choose_move(self, game): assert game.cur_player() == 0 best_move = None best_value = -1000000 for move in game.legal_moves(): next_game = game.copy().make_move(move) value = self.model.predict(normalize_board(next_game.board), batch_size=1) assert value >= -1.0 and value <= 1.0 // ... modified code ... if value > best_value: best_move = move best_value = value return best_move // ... rest of the code ...
e0f4135b90a3f920db3a14b14b70e0e57df3d717
setup.py
setup.py
from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
Build for either python 2 or python 3
Build for either python 2 or python 3
Python
apache-2.0
admiyo/PyKerberos,admiyo/PyKerberos,admiyo/PyKerberos
from distutils.core import setup, Extension import sys - import commands + + if sys.version_info < (3,0): + import commands as subprocess + else: + import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", - extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), + extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), - extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), + extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
Build for either python 2 or python 3
## Code Before: from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], ) ## Instruction: Build for either python 2 or python 3 ## Code After: from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
... import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess ... "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ ...
6df7ee955c7dfaee9a597b331dbc4c448fe3738a
fpr/migrations/0017_ocr_unique_names.py
fpr/migrations/0017_ocr_unique_names.py
from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ FPCommand = apps.get_model('fpr', 'FPCommand') ocr_command = FPCommand.objects.get( uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
Fix OCR command UUID typo
Fix OCR command UUID typo
Python
agpl-3.0
artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin
from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ - IDCommand = apps.get_model('fpr', 'IDCommand') + FPCommand = apps.get_model('fpr', 'FPCommand') - ocr_command = IDCommand.objects.get( + ocr_command = FPCommand.objects.get( - uuid='5d501dbf-76bb-4569-a9db-9e367800995e') + uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
Fix OCR command UUID typo
## Code Before: from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ] ## Instruction: Fix OCR command UUID typo ## Code After: from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ FPCommand = apps.get_model('fpr', 'FPCommand') ocr_command = FPCommand.objects.get( uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
... """ FPCommand = apps.get_model('fpr', 'FPCommand') ocr_command = FPCommand.objects.get( uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a') ocr_command.command = ( ...
bd3d97cefe61886ab8c2fa24eecd624ca1c6f751
profile_collection/startup/90-settings.py
profile_collection/startup/90-settings.py
import logging # metadata set at startup RE.md['owner'] = 'xf11id' RE.md['beamline_id'] = 'CHX' # removing 'custom' as it is raising an exception in 0.3.2 # gs.RE.md['custom'] = {} def print_scanid(name, doc): if name == 'start': print('Scan ID:', doc['scan_id']) print('Unique ID:', doc['uid']) def print_md(name, doc): if name == 'start': print('Metadata:\n', repr(doc)) RE.subscribe(print_scanid) #from eiger_io.fs_handler import LazyEigerHandler #db.fs.register_handler("AD_EIGER", LazyEigerHandler)
import logging # metadata set at startup RE.md['owner'] = 'xf11id' RE.md['beamline_id'] = 'CHX' # removing 'custom' as it is raising an exception in 0.3.2 # gs.RE.md['custom'] = {} def print_md(name, doc): if name == 'start': print('Metadata:\n', repr(doc)) RE.subscribe(print_scanid) #from eiger_io.fs_handler import LazyEigerHandler #db.fs.register_handler("AD_EIGER", LazyEigerHandler)
Remove redundant Scan ID printing (there is another one elsewhere)
Remove redundant Scan ID printing (there is another one elsewhere)
Python
bsd-2-clause
NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd
import logging # metadata set at startup RE.md['owner'] = 'xf11id' RE.md['beamline_id'] = 'CHX' # removing 'custom' as it is raising an exception in 0.3.2 # gs.RE.md['custom'] = {} - - - - def print_scanid(name, doc): - if name == 'start': - print('Scan ID:', doc['scan_id']) - print('Unique ID:', doc['uid']) def print_md(name, doc): if name == 'start': print('Metadata:\n', repr(doc)) RE.subscribe(print_scanid) #from eiger_io.fs_handler import LazyEigerHandler #db.fs.register_handler("AD_EIGER", LazyEigerHandler)
Remove redundant Scan ID printing (there is another one elsewhere)
## Code Before: import logging # metadata set at startup RE.md['owner'] = 'xf11id' RE.md['beamline_id'] = 'CHX' # removing 'custom' as it is raising an exception in 0.3.2 # gs.RE.md['custom'] = {} def print_scanid(name, doc): if name == 'start': print('Scan ID:', doc['scan_id']) print('Unique ID:', doc['uid']) def print_md(name, doc): if name == 'start': print('Metadata:\n', repr(doc)) RE.subscribe(print_scanid) #from eiger_io.fs_handler import LazyEigerHandler #db.fs.register_handler("AD_EIGER", LazyEigerHandler) ## Instruction: Remove redundant Scan ID printing (there is another one elsewhere) ## Code After: import logging # metadata set at startup RE.md['owner'] = 'xf11id' RE.md['beamline_id'] = 'CHX' # removing 'custom' as it is raising an exception in 0.3.2 # gs.RE.md['custom'] = {} def print_md(name, doc): if name == 'start': print('Metadata:\n', repr(doc)) RE.subscribe(print_scanid) #from eiger_io.fs_handler import LazyEigerHandler #db.fs.register_handler("AD_EIGER", LazyEigerHandler)
// ... existing code ... # gs.RE.md['custom'] = {} // ... rest of the code ...
d34fbc70d5873d159c311caed41b745b05534ce9
lib/solution.py
lib/solution.py
class Solution: def __init__(self, nr): self.nr = nr self.test = False self.input = "" self.solution = ["(not calculated)", "(not calculated)"] self.calculated = [False, False] def __str__(self): return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1]) def calculate(self, test=False): raise NotImplementedError('users must define calculate to use this base class') def get_solution(self, nr): if nr in [1, 2]: return self.solution[nr-1] def set_solution(self, nr, value): if nr in [1, 2]: self.solution[nr-1] = value self.calculated[nr-1] = True def is_calculated(self, nr): if nr in [1, 2]: return self.calculated[nr-1] def read_input(self): with open(self.nr+"/input.txt", "r") as f: self.input = f.read()
class Solution: def __init__(self, nr): self.nr = nr self.test = False self.input = "" self.solution = ["(not calculated)", "(not calculated)"] self.calculated = [False, False] def __str__(self): return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1]) def calculate(self, test=False): raise NotImplementedError('users must define calculate to use this base class') def get_solution(self, nr): if nr in [1, 2]: return self.solution[nr-1] def set_solution(self, nr, value): if nr in [1, 2]: self.solution[nr-1] = value self.calculated[nr-1] = True def is_calculated(self, nr): if nr in [1, 2]: return self.calculated[nr-1] def read_input(self, lines=False): with open(self.nr+"/input.txt", "r") as f: if lines: self.input = f.readlines() else: self.input = f.read()
Read Input: Read file complete or by lines
Read Input: Read file complete or by lines
Python
mit
unstko/adventofcode2016
class Solution: def __init__(self, nr): self.nr = nr self.test = False self.input = "" self.solution = ["(not calculated)", "(not calculated)"] self.calculated = [False, False] def __str__(self): return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1]) def calculate(self, test=False): raise NotImplementedError('users must define calculate to use this base class') def get_solution(self, nr): if nr in [1, 2]: return self.solution[nr-1] def set_solution(self, nr, value): if nr in [1, 2]: self.solution[nr-1] = value self.calculated[nr-1] = True def is_calculated(self, nr): if nr in [1, 2]: return self.calculated[nr-1] - def read_input(self): + def read_input(self, lines=False): with open(self.nr+"/input.txt", "r") as f: + if lines: + self.input = f.readlines() + else: - self.input = f.read() + self.input = f.read()
Read Input: Read file complete or by lines
## Code Before: class Solution: def __init__(self, nr): self.nr = nr self.test = False self.input = "" self.solution = ["(not calculated)", "(not calculated)"] self.calculated = [False, False] def __str__(self): return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1]) def calculate(self, test=False): raise NotImplementedError('users must define calculate to use this base class') def get_solution(self, nr): if nr in [1, 2]: return self.solution[nr-1] def set_solution(self, nr, value): if nr in [1, 2]: self.solution[nr-1] = value self.calculated[nr-1] = True def is_calculated(self, nr): if nr in [1, 2]: return self.calculated[nr-1] def read_input(self): with open(self.nr+"/input.txt", "r") as f: self.input = f.read() ## Instruction: Read Input: Read file complete or by lines ## Code After: class Solution: def __init__(self, nr): self.nr = nr self.test = False self.input = "" self.solution = ["(not calculated)", "(not calculated)"] self.calculated = [False, False] def __str__(self): return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1]) def calculate(self, test=False): raise NotImplementedError('users must define calculate to use this base class') def get_solution(self, nr): if nr in [1, 2]: return self.solution[nr-1] def set_solution(self, nr, value): if nr in [1, 2]: self.solution[nr-1] = value self.calculated[nr-1] = True def is_calculated(self, nr): if nr in [1, 2]: return self.calculated[nr-1] def read_input(self, lines=False): with open(self.nr+"/input.txt", "r") as f: if lines: self.input = f.readlines() else: self.input = f.read()
// ... existing code ... def read_input(self, lines=False): with open(self.nr+"/input.txt", "r") as f: if lines: self.input = f.readlines() else: self.input = f.read() // ... rest of the code ...
9ec8d2b01e0f8aefc9d4c2c82c22af6f8c48a75b
usingnamespace/api/interfaces.py
usingnamespace/api/interfaces.py
from zope.interface import Interface class ISerializer(Interface): """Marker Interface"""
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
Add new marker interface for a digest method
Add new marker interface for a digest method
Python
isc
usingnamespace/usingnamespace
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" + class IDigestMethod(Interface): + """Marker Interface""" +
Add new marker interface for a digest method
## Code Before: from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" ## Instruction: Add new marker interface for a digest method ## Code After: from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
... """Marker Interface""" class IDigestMethod(Interface): """Marker Interface""" ...
aed4d20d4e101891d2dd1149a6c111f06036ec73
libnacl/utils.py
libnacl/utils.py
import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def hex_seed(self): if hasattr(self, 'seed'): return libnacl.encode.hex_encode(self.seed) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce.encode(encoding='UTF-8')
import libnacl import libnacl.encode # Import python libs import time import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def hex_seed(self): if hasattr(self, 'seed'): return libnacl.encode.hex_encode(self.seed) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0}{1}'.format( str(int(time.time() * 1000000)), binascii.hexlify(libnacl.randombytes(24)).decode(encoding='UTF-8')) return nonce.encode(encoding='UTF-8')[:libnacl.crypto_box_NONCEBYTES]
Make the nonce more secure and faster to generate
Make the nonce more secure and faster to generate
Python
apache-2.0
cachedout/libnacl,saltstack/libnacl,mindw/libnacl,johnttan/libnacl,RaetProtocol/libnacl,coinkite/libnacl
import libnacl import libnacl.encode # Import python libs - import datetime + import time import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def hex_seed(self): if hasattr(self, 'seed'): return libnacl.encode.hex_encode(self.seed) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' - nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( + nonce = '{0}{1}'.format( - datetime.datetime.now(), + str(int(time.time() * 1000000)), - binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) + binascii.hexlify(libnacl.randombytes(24)).decode(encoding='UTF-8')) - return nonce.encode(encoding='UTF-8') + return nonce.encode(encoding='UTF-8')[:libnacl.crypto_box_NONCEBYTES]
Make the nonce more secure and faster to generate
## Code Before: import libnacl import libnacl.encode # Import python libs import datetime import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def hex_seed(self): if hasattr(self, 'seed'): return libnacl.encode.hex_encode(self.seed) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0:%Y%m%d%H%M%S%f}{1}'.format( datetime.datetime.now(), binascii.hexlify(libnacl.randombytes(2)).decode(encoding='UTF-8')) return nonce.encode(encoding='UTF-8') ## Instruction: Make the nonce more secure and faster to generate ## Code After: import libnacl import libnacl.encode # Import python libs import time import binascii class BaseKey(object): ''' Include methods for key management convenience ''' def hex_sk(self): if hasattr(self, 'sk'): return libnacl.encode.hex_encode(self.sk) else: return '' def hex_pk(self): if hasattr(self, 'pk'): return libnacl.encode.hex_encode(self.pk) def hex_vk(self): if hasattr(self, 'vk'): return libnacl.encode.hex_encode(self.vk) def hex_seed(self): if hasattr(self, 'seed'): return libnacl.encode.hex_encode(self.seed) def salsa_key(): ''' Generates a salsa2020 key ''' return libnacl.randombytes(libnacl.crypto_secretbox_KEYBYTES) def time_nonce(): ''' Generates a safe nonce The nonce generated here is done by grabbing the 20 digit microsecond timestamp and appending 4 random chars ''' nonce = '{0}{1}'.format( str(int(time.time() * 1000000)), binascii.hexlify(libnacl.randombytes(24)).decode(encoding='UTF-8')) return nonce.encode(encoding='UTF-8')[:libnacl.crypto_box_NONCEBYTES]
... # Import python libs import time import binascii ... ''' nonce = '{0}{1}'.format( str(int(time.time() * 1000000)), binascii.hexlify(libnacl.randombytes(24)).decode(encoding='UTF-8')) return nonce.encode(encoding='UTF-8')[:libnacl.crypto_box_NONCEBYTES] ...
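A stand-alone sketch of the nonce recipe this record settles on, kept to the standard library: os.urandom stands in for libnacl.randombytes, and the 24-byte size is an assumed stand-in for libnacl.crypto_box_NONCEBYTES.

import os
import time

NONCE_BYTES = 24  # assumption: crypto_box nonces are 24 bytes

def time_nonce(size=NONCE_BYTES):
    # Integer microsecond timestamp: cheaper than datetime string formatting.
    stamp = str(int(time.time() * 1000000))
    # Hex-encode random bytes so the tail of the nonce is unpredictable.
    tail = os.urandom(size).hex()
    # Truncate to the exact length the cipher expects.
    return (stamp + tail).encode('utf-8')[:size]

print(time_nonce())  # e.g. b'17154062501234569f3ab2c1'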
0a850f935ce6cc48a68cffbef64c127daa22a42f
write.py
write.py
import colour import csv import json import os import pprint # write to file as json, csv, markdown, plaintext or print table def write_data(data, user, format=None): if format is not None: directory = './data/' if not os.path.exists(directory): os.makedirs(directory) f = open(directory + user + '.' + format, 'w') if format == 'json': f.write(json.dumps(data, indent=4)) elif format == 'csv': keys = data[0].keys() dw = csv.DictWriter(f, fieldnames=keys) dw.writeheader() dw.writerows(data) elif format == 'md': f.write('## %s - GitHub repositories\n' % user) for row in data: f.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) elif format == 'txt': f.write('%s - GitHub repositories\n\n' % user) for row in data: f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) f.close()
import csv import json import os from tabulate import tabulate def write_data(d, u, f=None): if f is not None: directory = './data/' if not os.path.exists(directory): os.makedirs(directory) file = open(directory + u + '.' + f, 'w') if f == 'json': file.write(json.dumps(d, indent=4)) elif f == 'csv': keys = d[0].keys() dw = csv.DictWriter(file, fieldnames=keys) dw.writeheader() dw.writerows(d) elif f == 'md': file.write('## %s - GitHub repositories\n' % u) for row in d: file.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) elif f == 'txt': file.write('%s - GitHub repositories\n\n' % u) for row in d: file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) file.close() else: print(tabulate(d, headers="keys"))
Print table if no file format provided
Print table if no file format provided
Python
mit
kshvmdn/github-list,kshvmdn/github-list,kshvmdn/github-list
- import colour import csv import json import os - import pprint + from tabulate import tabulate - # write to file as json, csv, markdown, plaintext or print table - - - def write_data(data, user, format=None): + def write_data(d, u, f=None): - if format is not None: + if f is not None: directory = './data/' if not os.path.exists(directory): os.makedirs(directory) - f = open(directory + user + '.' + format, 'w') + file = open(directory + u + '.' + f, 'w') - if format == 'json': + if f == 'json': - f.write(json.dumps(data, indent=4)) + file.write(json.dumps(d, indent=4)) - elif format == 'csv': + elif f == 'csv': - keys = data[0].keys() + keys = d[0].keys() - dw = csv.DictWriter(f, fieldnames=keys) + dw = csv.DictWriter(file, fieldnames=keys) dw.writeheader() - dw.writerows(data) + dw.writerows(d) - elif format == 'md': + elif f == 'md': - f.write('## %s - GitHub repositories\n' % user) + file.write('## %s - GitHub repositories\n' % u) - for row in data: + for row in d: - f.write( + file.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) - elif format == 'txt': + elif f == 'txt': - f.write('%s - GitHub repositories\n\n' % user) + file.write('%s - GitHub repositories\n\n' % u) - for row in data: + for row in d: - f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], + file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], - row['desc'], + row['desc'], - row['lang'], + row['lang'], - row['stars'])) + row['stars'])) + file.close() + else: + print(tabulate(d, headers="keys")) - f.close() -
Print table if no file format provided
## Code Before: import colour import csv import json import os import pprint # write to file as json, csv, markdown, plaintext or print table def write_data(data, user, format=None): if format is not None: directory = './data/' if not os.path.exists(directory): os.makedirs(directory) f = open(directory + user + '.' + format, 'w') if format == 'json': f.write(json.dumps(data, indent=4)) elif format == 'csv': keys = data[0].keys() dw = csv.DictWriter(f, fieldnames=keys) dw.writeheader() dw.writerows(data) elif format == 'md': f.write('## %s - GitHub repositories\n' % user) for row in data: f.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) elif format == 'txt': f.write('%s - GitHub repositories\n\n' % user) for row in data: f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) f.close() ## Instruction: Print table if no file format provided ## Code After: import csv import json import os from tabulate import tabulate def write_data(d, u, f=None): if f is not None: directory = './data/' if not os.path.exists(directory): os.makedirs(directory) file = open(directory + u + '.' + f, 'w') if f == 'json': file.write(json.dumps(d, indent=4)) elif f == 'csv': keys = d[0].keys() dw = csv.DictWriter(file, fieldnames=keys) dw.writeheader() dw.writerows(d) elif f == 'md': file.write('## %s - GitHub repositories\n' % u) for row in d: file.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) elif f == 'txt': file.write('%s - GitHub repositories\n\n' % u) for row in d: file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) file.close() else: print(tabulate(d, headers="keys"))
# ... existing code ... import csv # ... modified code ... import os from tabulate import tabulate ... def write_data(d, u, f=None): if f is not None: ... file = open(directory + u + '.' + f, 'w') if f == 'json': file.write(json.dumps(d, indent=4)) elif f == 'csv': keys = d[0].keys() dw = csv.DictWriter(file, fieldnames=keys) dw.writeheader() dw.writerows(d) elif f == 'md': file.write('## %s - GitHub repositories\n' % u) for row in d: file.write( '#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'], ... row['stars'])) elif f == 'txt': file.write('%s - GitHub repositories\n\n' % u) for row in d: file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'], row['desc'], row['lang'], row['stars'])) file.close() else: print(tabulate(d, headers="keys")) # ... rest of the code ...
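The new fallback branch is easy to try on its own; a minimal sketch, assuming the third-party tabulate package is installed:

from tabulate import tabulate

rows = [
    {'name': 'repo-a', 'lang': 'Python', 'stars': 12},
    {'name': 'repo-b', 'lang': 'Go', 'stars': 3},
]
# tabulate accepts a list of dicts directly when headers="keys".
print(tabulate(rows, headers="keys"))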
0d0d43f957cb79a99eaacef0623cd57351ca40f6
test/factories.py
test/factories.py
import factory from django.contrib.auth.models import User class UserFactory(factory.Factory): FACTORY_FOR = User first_name = "Boy" last_name = "Factory" email = factory.LazyAttribute( lambda a: "{0}_{1}@example.com".format(a.first_name, a.last_name).lower()) username = factory.Sequence(lambda n: "username_%s" % n)
import factory from django.contrib.auth.models import User class UserFactory(factory.Factory): FACTORY_FOR = User first_name = "Boy" last_name = "Factory" email = factory.LazyAttribute( lambda a: "{0}_{1}@example.com".format(a.first_name, a.last_name).lower()) username = factory.Sequence(lambda n: "username_%s" % n) is_active = False is_staff = False is_superuser = False
Fix userfactory - set user flags
Fix userfactory - set user flags
Python
mit
sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa
import factory from django.contrib.auth.models import User class UserFactory(factory.Factory): FACTORY_FOR = User first_name = "Boy" last_name = "Factory" email = factory.LazyAttribute( lambda a: "{0}_{1}@example.com".format(a.first_name, a.last_name).lower()) username = factory.Sequence(lambda n: "username_%s" % n) + is_active = False + is_staff = False + is_superuser = False
Fix userfactory - set user flags
## Code Before: import factory from django.contrib.auth.models import User class UserFactory(factory.Factory): FACTORY_FOR = User first_name = "Boy" last_name = "Factory" email = factory.LazyAttribute( lambda a: "{0}_{1}@example.com".format(a.first_name, a.last_name).lower()) username = factory.Sequence(lambda n: "username_%s" % n) ## Instruction: Fix userfactory - set user flags ## Code After: import factory from django.contrib.auth.models import User class UserFactory(factory.Factory): FACTORY_FOR = User first_name = "Boy" last_name = "Factory" email = factory.LazyAttribute( lambda a: "{0}_{1}@example.com".format(a.first_name, a.last_name).lower()) username = factory.Sequence(lambda n: "username_%s" % n) is_active = False is_staff = False is_superuser = False
... username = factory.Sequence(lambda n: "username_%s" % n) is_active = False is_staff = False is_superuser = False ...
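The same default-flag idea in a self-contained sketch. Note this uses the modern factory_boy spelling (class Meta: model = ...) rather than the older FACTORY_FOR attribute shown in the record, and a plain class stands in for Django's User model:

import factory

class User:  # stand-in for django.contrib.auth.models.User
    def __init__(self, username, is_active, is_staff, is_superuser):
        self.username = username
        self.is_active = is_active
        self.is_staff = is_staff
        self.is_superuser = is_superuser

class UserFactory(factory.Factory):
    class Meta:
        model = User

    username = factory.Sequence(lambda n: 'username_%s' % n)
    # Explicit defaults so test users never carry surprising permissions.
    is_active = False
    is_staff = False
    is_superuser = False

user = UserFactory()
print(user.username, user.is_staff)  # username_0 False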
f00fd0fde81a340cf00030bbe2562b8c878edd41
src/yawf/signals.py
src/yawf/signals.py
from django.dispatch import Signal message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
from django.dispatch import Signal message_handled = Signal( providing_args=['message', 'instance', 'new_revision', 'transition_result'] )
Add new arg in message_handled signal definition
Add new arg in message_handled signal definition
Python
mit
freevoid/yawf
from django.dispatch import Signal - message_handled = Signal(providing_args=['message', 'instance', 'new_revision']) + message_handled = Signal( + providing_args=['message', 'instance', 'new_revision', 'transition_result'] + )
Add new arg in message_handled signal definition
## Code Before: from django.dispatch import Signal message_handled = Signal(providing_args=['message', 'instance', 'new_revision']) ## Instruction: Add new arg in message_handled signal definition ## Code After: from django.dispatch import Signal message_handled = Signal( providing_args=['message', 'instance', 'new_revision', 'transition_result'] )
// ... existing code ... message_handled = Signal( providing_args=['message', 'instance', 'new_revision', 'transition_result'] ) // ... rest of the code ...
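In Django itself, providing_args was a documentation hint only; what actually matters is that send() forwards the extra keyword to every receiver. A stripped-down, dependency-free sketch of that contract:

class Signal:  # minimal stand-in for django.dispatch.Signal
    def __init__(self):
        self._receivers = []

    def connect(self, receiver):
        self._receivers.append(receiver)

    def send(self, sender, **kwargs):
        # Every receiver sees the full keyword payload, including the
        # newly documented transition_result argument.
        return [receiver(sender=sender, **kwargs) for receiver in self._receivers]

message_handled = Signal()
message_handled.connect(lambda sender, **kw: print(kw['transition_result']))
message_handled.send(sender=None, message='msg', instance=None,
                     new_revision=2, transition_result='ok')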
8571f61a20f9ef536040c3101e24c48640a72f6a
iss/admin.py
iss/admin.py
from django.contrib import admin from .models import Organization class OrganizationAdmin(admin.ModelAdmin): list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso') search_fields = ('org_name', 'account_num') admin.site.register(Organization, OrganizationAdmin)
from django.contrib import admin from .models import Organization class OrganizationAdmin(admin.ModelAdmin): list_display = ('membersuite_id', 'account_num', 'org_name', 'city', 'state', 'country_iso') search_fields = ('org_name', 'membersuite_id', 'account_num') admin.site.register(Organization, OrganizationAdmin)
Add membersuite ID to display and search
Add membersuite ID to display and search
Python
mit
AASHE/iss
from django.contrib import admin from .models import Organization class OrganizationAdmin(admin.ModelAdmin): - list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso') + list_display = ('membersuite_id', 'account_num', 'org_name', 'city', + 'state', 'country_iso') - search_fields = ('org_name', 'account_num') + search_fields = ('org_name', 'membersuite_id', 'account_num') admin.site.register(Organization, OrganizationAdmin)
Add membersuite ID to display and search
## Code Before: from django.contrib import admin from .models import Organization class OrganizationAdmin(admin.ModelAdmin): list_display = ('account_num', 'org_name', 'city', 'state', 'country_iso') search_fields = ('org_name', 'account_num') admin.site.register(Organization, OrganizationAdmin) ## Instruction: Add membersuite ID to display and search ## Code After: from django.contrib import admin from .models import Organization class OrganizationAdmin(admin.ModelAdmin): list_display = ('membersuite_id', 'account_num', 'org_name', 'city', 'state', 'country_iso') search_fields = ('org_name', 'membersuite_id', 'account_num') admin.site.register(Organization, OrganizationAdmin)
// ... existing code ... class OrganizationAdmin(admin.ModelAdmin): list_display = ('membersuite_id', 'account_num', 'org_name', 'city', 'state', 'country_iso') search_fields = ('org_name', 'membersuite_id', 'account_num') admin.site.register(Organization, OrganizationAdmin) // ... rest of the code ...
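Config-only changes like this are cheap to pin down with a test; a hypothetical sketch, assuming a configured Django test environment in which iss.admin is importable:

from iss.admin import OrganizationAdmin

def test_membersuite_id_is_exposed():
    assert 'membersuite_id' in OrganizationAdmin.list_display
    assert 'membersuite_id' in OrganizationAdmin.search_fields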
4e4262f3d9cde4394d08681c517fcec4e2e9a336
shellpython/tests/test_helpers.py
shellpython/tests/test_helpers.py
import unittest import tempfile import os from shellpython.helpers import Dir class TestDirectory(unittest.TestCase): def test_relative_dirs(self): cur_dir = os.path.split(__file__)[0] with Dir(os.path.join(cur_dir, 'data')): self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd()) with Dir(os.path.join('locator')): self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd()) def test_absolute_dirs(self): with Dir(tempfile.gettempdir()): self.assertEqual(tempfile.gettempdir(), os.getcwd())
import unittest import tempfile import os from os import path from shellpython.helpers import Dir class TestDirectory(unittest.TestCase): def test_relative_dirs(self): cur_dir = path.dirname(path.abspath(__file__)) with Dir(path.join(cur_dir, 'data')): self.assertEqual(path.join(cur_dir, 'data'), os.getcwd()) with Dir(path.join('locator')): self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd()) def test_absolute_dirs(self): with Dir(tempfile.gettempdir()): self.assertEqual(tempfile.gettempdir(), os.getcwd())
Fix directory tests, __file__ may return relative path and now it is taken into consideration
Fix directory tests, __file__ may return relative path and now it is taken into consideration
Python
bsd-3-clause
lamerman/shellpy
import unittest import tempfile import os + from os import path from shellpython.helpers import Dir class TestDirectory(unittest.TestCase): def test_relative_dirs(self): - cur_dir = os.path.split(__file__)[0] + cur_dir = path.dirname(path.abspath(__file__)) - with Dir(os.path.join(cur_dir, 'data')): + with Dir(path.join(cur_dir, 'data')): - self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd()) + self.assertEqual(path.join(cur_dir, 'data'), os.getcwd()) - with Dir(os.path.join('locator')): + with Dir(path.join('locator')): - self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd()) + self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd()) def test_absolute_dirs(self): with Dir(tempfile.gettempdir()): self.assertEqual(tempfile.gettempdir(), os.getcwd())
Fix directory tests, __file__ may return relative path and now it is taken into consideration
## Code Before: import unittest import tempfile import os from shellpython.helpers import Dir class TestDirectory(unittest.TestCase): def test_relative_dirs(self): cur_dir = os.path.split(__file__)[0] with Dir(os.path.join(cur_dir, 'data')): self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd()) with Dir(os.path.join('locator')): self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd()) def test_absolute_dirs(self): with Dir(tempfile.gettempdir()): self.assertEqual(tempfile.gettempdir(), os.getcwd()) ## Instruction: Fix directory tests, __file__ may return relative path and now it is taken into consideration ## Code After: import unittest import tempfile import os from os import path from shellpython.helpers import Dir class TestDirectory(unittest.TestCase): def test_relative_dirs(self): cur_dir = path.dirname(path.abspath(__file__)) with Dir(path.join(cur_dir, 'data')): self.assertEqual(path.join(cur_dir, 'data'), os.getcwd()) with Dir(path.join('locator')): self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd()) def test_absolute_dirs(self): with Dir(tempfile.gettempdir()): self.assertEqual(tempfile.gettempdir(), os.getcwd())
// ... existing code ... import os from os import path from shellpython.helpers import Dir // ... modified code ... def test_relative_dirs(self): cur_dir = path.dirname(path.abspath(__file__)) with Dir(path.join(cur_dir, 'data')): self.assertEqual(path.join(cur_dir, 'data'), os.getcwd()) with Dir(path.join('locator')): self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd()) // ... rest of the code ...
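The root cause is easy to reproduce without the test suite: when a module runs as "python test_helpers.py", __file__ can be a bare relative name, so os.path.split(__file__)[0] yields an empty string, while dirname(abspath(...)) always gives a usable directory:

from os import path

rel = 'test_helpers.py'                 # what __file__ may look like
print(repr(path.split(rel)[0]))         # '' -- useless as a directory to enter
print(path.dirname(path.abspath(rel)))  # absolute directory, always valid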
bcb24ef03a65d80c09ef47f19a64fd854a70c082
tests/chainer_tests/training_tests/extensions_tests/test_print_report.py
tests/chainer_tests/training_tests/extensions_tests/test_print_report.py
import sys import unittest from mock import MagicMock from chainer import testing from chainer.training import extensions class TestPrintReport(unittest.TestCase): def _setup(self, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( ['epoch'], trigger=(1, 'iteration'), log_name=None)) self.stream = MagicMock() if delete_flush: del self.stream.flush self.report = extensions.PrintReport( ['epoch'], log_report=self.logreport, out=self.stream) self.trainer = testing.get_trainer_with_mock_updater( stop_trigger=(1, 'iteration')) self.trainer.extend(self.logreport) self.trainer.extend(self.report) self.logreport.log = [{'epoch': 0}] def test_stream_with_flush_is_flushed(self): self._setup(delete_flush=False) self.assertTrue(hasattr(self.stream, 'flush')) self.stream.flush.assert_not_called() self.report(self.trainer) self.stream.flush.assert_called_with() def test_stream_without_flush_raises_no_exception(self): self._setup(delete_flush=True) self.assertFalse(hasattr(self.stream, 'flush')) self.report(self.trainer) testing.run_module(__name__, __file__)
import sys import unittest from mock import MagicMock from chainer import testing from chainer.training import extensions class TestPrintReport(unittest.TestCase): def _setup(self, stream=None, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( ['epoch'], trigger=(1, 'iteration'), log_name=None)) if stream is None: self.stream = MagicMock() if delete_flush: del self.stream.flush else: self.stream = stream self.report = extensions.PrintReport( ['epoch'], log_report=self.logreport, out=self.stream) self.trainer = testing.get_trainer_with_mock_updater( stop_trigger=(1, 'iteration')) self.trainer.extend(self.logreport) self.trainer.extend(self.report) self.logreport.log = [{'epoch': 0}] def test_stream_with_flush_is_flushed(self): self._setup(delete_flush=False) self.assertTrue(hasattr(self.stream, 'flush')) self.stream.flush.assert_not_called() self.report(self.trainer) self.stream.flush.assert_called_with() def test_stream_without_flush_raises_no_exception(self): self._setup(delete_flush=True) self.assertFalse(hasattr(self.stream, 'flush')) self.report(self.trainer) def test_real_stream_raises_no_exception(self): self._setup(stream=sys.stderr) self.report(self.trainer) testing.run_module(__name__, __file__)
Test PrintReport with a real stream
Test PrintReport with a real stream
Python
mit
ktnyt/chainer,pfnet/chainer,rezoo/chainer,hvy/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer,okuta/chainer,jnishi/chainer,niboshi/chainer,hvy/chainer,jnishi/chainer,hvy/chainer,chainer/chainer,chainer/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,niboshi/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,niboshi/chainer,wkentaro/chainer,ktnyt/chainer,chainer/chainer,okuta/chainer,jnishi/chainer,jnishi/chainer,tkerola/chainer,wkentaro/chainer
import sys import unittest from mock import MagicMock from chainer import testing from chainer.training import extensions + class TestPrintReport(unittest.TestCase): - def _setup(self, delete_flush=False): + def _setup(self, stream=None, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( ['epoch'], trigger=(1, 'iteration'), log_name=None)) + if stream is None: - self.stream = MagicMock() + self.stream = MagicMock() - if delete_flush: + if delete_flush: - del self.stream.flush + del self.stream.flush + else: + self.stream = stream self.report = extensions.PrintReport( ['epoch'], log_report=self.logreport, out=self.stream) self.trainer = testing.get_trainer_with_mock_updater( stop_trigger=(1, 'iteration')) self.trainer.extend(self.logreport) self.trainer.extend(self.report) self.logreport.log = [{'epoch': 0}] def test_stream_with_flush_is_flushed(self): self._setup(delete_flush=False) self.assertTrue(hasattr(self.stream, 'flush')) self.stream.flush.assert_not_called() self.report(self.trainer) self.stream.flush.assert_called_with() def test_stream_without_flush_raises_no_exception(self): self._setup(delete_flush=True) self.assertFalse(hasattr(self.stream, 'flush')) self.report(self.trainer) + def test_real_stream_raises_no_exception(self): + self._setup(stream=sys.stderr) + self.report(self.trainer) + testing.run_module(__name__, __file__)
Test PrintReport with a real stream
## Code Before: import sys import unittest from mock import MagicMock from chainer import testing from chainer.training import extensions class TestPrintReport(unittest.TestCase): def _setup(self, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( ['epoch'], trigger=(1, 'iteration'), log_name=None)) self.stream = MagicMock() if delete_flush: del self.stream.flush self.report = extensions.PrintReport( ['epoch'], log_report=self.logreport, out=self.stream) self.trainer = testing.get_trainer_with_mock_updater( stop_trigger=(1, 'iteration')) self.trainer.extend(self.logreport) self.trainer.extend(self.report) self.logreport.log = [{'epoch': 0}] def test_stream_with_flush_is_flushed(self): self._setup(delete_flush=False) self.assertTrue(hasattr(self.stream, 'flush')) self.stream.flush.assert_not_called() self.report(self.trainer) self.stream.flush.assert_called_with() def test_stream_without_flush_raises_no_exception(self): self._setup(delete_flush=True) self.assertFalse(hasattr(self.stream, 'flush')) self.report(self.trainer) testing.run_module(__name__, __file__) ## Instruction: Test PrintReport with a real stream ## Code After: import sys import unittest from mock import MagicMock from chainer import testing from chainer.training import extensions class TestPrintReport(unittest.TestCase): def _setup(self, stream=None, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( ['epoch'], trigger=(1, 'iteration'), log_name=None)) if stream is None: self.stream = MagicMock() if delete_flush: del self.stream.flush else: self.stream = stream self.report = extensions.PrintReport( ['epoch'], log_report=self.logreport, out=self.stream) self.trainer = testing.get_trainer_with_mock_updater( stop_trigger=(1, 'iteration')) self.trainer.extend(self.logreport) self.trainer.extend(self.report) self.logreport.log = [{'epoch': 0}] def test_stream_with_flush_is_flushed(self): self._setup(delete_flush=False) self.assertTrue(hasattr(self.stream, 'flush')) self.stream.flush.assert_not_called() self.report(self.trainer) self.stream.flush.assert_called_with() def test_stream_without_flush_raises_no_exception(self): self._setup(delete_flush=True) self.assertFalse(hasattr(self.stream, 'flush')) self.report(self.trainer) def test_real_stream_raises_no_exception(self): self._setup(stream=sys.stderr) self.report(self.trainer) testing.run_module(__name__, __file__)
# ... existing code ... class TestPrintReport(unittest.TestCase): def _setup(self, stream=None, delete_flush=False): self.logreport = MagicMock(spec=extensions.LogReport( # ... modified code ... ['epoch'], trigger=(1, 'iteration'), log_name=None)) if stream is None: self.stream = MagicMock() if delete_flush: del self.stream.flush else: self.stream = stream self.report = extensions.PrintReport( ... def test_real_stream_raises_no_exception(self): self._setup(stream=sys.stderr) self.report(self.trainer) # ... rest of the code ...
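The mock trick the fixture relies on, shown in isolation: deleting an attribute on a MagicMock makes later access raise AttributeError, so hasattr() reports False:

from unittest import mock

stream = mock.MagicMock()
print(hasattr(stream, 'flush'))   # True: MagicMock grows attributes on demand
del stream.flush
print(hasattr(stream, 'flush'))   # False: now behaves like a flush-less stream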
04b7e79ce3fed1afac129098badb632ca226fdee
dispatch.py
dispatch.py
import config import steam steam.set_api_key(config.api_key) from optf2.backend import openid from optf2.frontend import render openid.set_session(render.session) import web if config.enable_fastcgi: web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr) if __name__ == "__main__": render.application.run()
import config import steam steam.set_api_key(config.api_key) from optf2.backend import openid from optf2.frontend import render openid.set_session(render.session) import web # wsgi application = render.application.wsgifunc() if config.enable_fastcgi: web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr) if __name__ == "__main__": render.application.run()
Add wsgi handler by default
Add wsgi handler by default
Python
isc
Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2
import config import steam steam.set_api_key(config.api_key) from optf2.backend import openid from optf2.frontend import render openid.set_session(render.session) import web + # wsgi + application = render.application.wsgifunc() + if config.enable_fastcgi: web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr) if __name__ == "__main__": render.application.run()
Add wsgi handler by default
## Code Before: import config import steam steam.set_api_key(config.api_key) from optf2.backend import openid from optf2.frontend import render openid.set_session(render.session) import web if config.enable_fastcgi: web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr) if __name__ == "__main__": render.application.run() ## Instruction: Add wsgi handler by default ## Code After: import config import steam steam.set_api_key(config.api_key) from optf2.backend import openid from optf2.frontend import render openid.set_session(render.session) import web # wsgi application = render.application.wsgifunc() if config.enable_fastcgi: web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr) if __name__ == "__main__": render.application.run()
# ... existing code ... # wsgi application = render.application.wsgifunc() if config.enable_fastcgi: # ... rest of the code ...
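web.py's wsgifunc() returns a plain WSGI callable; a stdlib-only sketch of the module-level application object that WSGI servers look for:

from wsgiref.simple_server import make_server

def application(environ, start_response):  # the module-level WSGI entry point
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

if __name__ == '__main__':
    make_server('localhost', 8000, application).serve_forever()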
3394278f379763dae9db34f3b528a229b8f06bc6
tempora/tests/test_timing.py
tempora/tests/test_timing.py
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
Revert "Use pytest.mark to selectively skip test."
Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.
Python
mit
jaraco/tempora
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') - @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): + if not hasattr(time, 'tzset'): + pytest.skip("tzset not available") + @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
Revert "Use pytest.mark to selectively skip test."
## Code Before: import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1 ## Instruction: Revert "Use pytest.mark to selectively skip test." ## Code After: import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
# ... existing code ... @pytest.fixture # ... modified code ... def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager # ... rest of the code ...
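The pattern in miniature, runnable under pytest: marks placed on a fixture are ignored, so the skip has to happen imperatively inside the fixture body:

import time

import pytest

@pytest.fixture
def alt_tz():
    if not hasattr(time, 'tzset'):      # tzset is POSIX-only
        pytest.skip('tzset not available')
    yield

def test_uses_alt_tz(alt_tz):
    assert True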
9b8cbfcf33ba644670a42490db7de4249e5ff080
invocations/docs.py
invocations/docs.py
import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: clean_docs.body() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs.body()
import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: clean_docs() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs()
Leverage __call__ on task downstream
Leverage __call__ on task downstream
Python
bsd-2-clause
mrjmad/invocations,alex/invocations,pyinvoke/invocations,singingwolfboy/invocations
import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: - clean_docs.body() + clean_docs() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: - browse_docs.body() + browse_docs()
Leverage __call__ on task downstream
## Code Before: import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: clean_docs.body() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs.body() ## Instruction: Leverage __call__ on task downstream ## Code After: import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: clean_docs() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs()
// ... existing code ... if clean: clean_docs() run("sphinx-build %s %s" % (docs_dir, build), pty=True) // ... modified code ... if browse: browse_docs() // ... rest of the code ...
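Why the shorter call works: a task object that defines __call__ can be invoked directly. A dependency-free sketch of that design (Task here is an illustrative stand-in for invoke's own class):

class Task:
    def __init__(self, body):
        self.body = body

    def __call__(self, *args, **kwargs):
        return self.body(*args, **kwargs)

def task(fn):
    return Task(fn)

@task
def clean_docs():
    print('cleaning build output')

clean_docs()        # new style: the task is directly callable
clean_docs.body()   # old style still works, but is no longer needed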
3f2f8e1cf57c44b589b053902e2945cd2486414d
src/dashboard/src/components/accounts/backends.py
src/dashboard/src/components/accounts/backends.py
import re from django.conf import settings from django.dispatch import receiver from django_auth_ldap.backend import LDAPBackend, populate_user from shibboleth.backends import ShibbolethRemoteUserBackend from components.helpers import generate_api_key class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend): def configure_user(self, user): generate_api_key(user) return user class CustomLDAPBackend(LDAPBackend): """Customize LDAP config.""" def __init__(self): super(CustomLDAPBackend, self).__init__() self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX def ldap_to_django_username(self, username): # Replaces user creation in get_ldap_users return re.sub(self._username_suffix + "$", "", username) def django_to_ldap_username(self, username): # Replaces user creation in get_ldap_users return username + self._username_suffix @receiver(populate_user) def ldap_populate_user(sender, user, ldap_user, **kwargs): generate_api_key(user)
import re from django.conf import settings from django.dispatch import receiver from django_auth_ldap.backend import LDAPBackend, populate_user from shibboleth.backends import ShibbolethRemoteUserBackend from components.helpers import generate_api_key class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend): def configure_user(self, user): generate_api_key(user) return user class CustomLDAPBackend(LDAPBackend): """Customize LDAP config.""" def __init__(self): super(CustomLDAPBackend, self).__init__() self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX def ldap_to_django_username(self, username): # Replaces user creation in get_ldap_users return re.sub(self._username_suffix + "$", "", username) def django_to_ldap_username(self, username): # Replaces user creation in get_ldap_users return username + self._username_suffix @receiver(populate_user) def ldap_populate_user(sender, user, ldap_user, **kwargs): if user.pk is None: user.save() generate_api_key(user)
Fix API key generation with LDAP
Fix API key generation with LDAP * Only generate on first login, not every login * Handle case where user has not yet been saved
Python
agpl-3.0
artefactual/archivematica,artefactual/archivematica,artefactual/archivematica,artefactual/archivematica
import re from django.conf import settings from django.dispatch import receiver from django_auth_ldap.backend import LDAPBackend, populate_user from shibboleth.backends import ShibbolethRemoteUserBackend from components.helpers import generate_api_key class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend): def configure_user(self, user): generate_api_key(user) return user class CustomLDAPBackend(LDAPBackend): """Customize LDAP config.""" def __init__(self): super(CustomLDAPBackend, self).__init__() self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX def ldap_to_django_username(self, username): # Replaces user creation in get_ldap_users return re.sub(self._username_suffix + "$", "", username) def django_to_ldap_username(self, username): # Replaces user creation in get_ldap_users return username + self._username_suffix @receiver(populate_user) def ldap_populate_user(sender, user, ldap_user, **kwargs): + if user.pk is None: + user.save() - generate_api_key(user) + generate_api_key(user)
Fix API key generation with LDAP
## Code Before: import re from django.conf import settings from django.dispatch import receiver from django_auth_ldap.backend import LDAPBackend, populate_user from shibboleth.backends import ShibbolethRemoteUserBackend from components.helpers import generate_api_key class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend): def configure_user(self, user): generate_api_key(user) return user class CustomLDAPBackend(LDAPBackend): """Customize LDAP config.""" def __init__(self): super(CustomLDAPBackend, self).__init__() self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX def ldap_to_django_username(self, username): # Replaces user creation in get_ldap_users return re.sub(self._username_suffix + "$", "", username) def django_to_ldap_username(self, username): # Replaces user creation in get_ldap_users return username + self._username_suffix @receiver(populate_user) def ldap_populate_user(sender, user, ldap_user, **kwargs): generate_api_key(user) ## Instruction: Fix API key generation with LDAP ## Code After: import re from django.conf import settings from django.dispatch import receiver from django_auth_ldap.backend import LDAPBackend, populate_user from shibboleth.backends import ShibbolethRemoteUserBackend from components.helpers import generate_api_key class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend): def configure_user(self, user): generate_api_key(user) return user class CustomLDAPBackend(LDAPBackend): """Customize LDAP config.""" def __init__(self): super(CustomLDAPBackend, self).__init__() self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX def ldap_to_django_username(self, username): # Replaces user creation in get_ldap_users return re.sub(self._username_suffix + "$", "", username) def django_to_ldap_username(self, username): # Replaces user creation in get_ldap_users return username + self._username_suffix @receiver(populate_user) def ldap_populate_user(sender, user, ldap_user, **kwargs): if user.pk is None: user.save() generate_api_key(user)
// ... existing code ... def ldap_populate_user(sender, user, ldap_user, **kwargs): if user.pk is None: user.save() generate_api_key(user) // ... rest of the code ...
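The guard in isolation, with plain classes standing in for the Django model and the key helper (all names here are illustrative):

class User:
    def __init__(self):
        self.pk = None          # unsaved objects have no primary key yet

    def save(self):
        self.pk = 42            # pretend the database assigned an id

def generate_api_key(user):
    if user.pk is None:
        raise ValueError('cannot attach a key to an unsaved user')
    return 'key-for-%s' % user.pk

user = User()                   # first LDAP login: nothing persisted yet
if user.pk is None:
    user.save()
print(generate_api_key(user))   # key-for-42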
e3f8fa13758ebed06abc1369d8c85474f7346d29
api/nodes/urls.py
api/nodes/urls.py
from django.conf.urls import url from api.nodes import views urlpatterns = [ # Examples: # url(r'^$', 'api.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^$', views.NodeList.as_view(), name='node-list'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'), url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'), url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'), url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'), url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ]
from django.conf.urls import url from api.nodes import views urlpatterns = [ # Examples: # url(r'^$', 'api.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^$', views.NodeList.as_view(), name='node-list'), url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'), url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'), url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'), url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'), url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ]
Add second delete url where users will send request to confirm they want to bulk delete.
Add second delete url where users will send request to confirm they want to bulk delete.
Python
apache-2.0
GageGaskins/osf.io,adlius/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,chrisseto/osf.io,GageGaskins/osf.io,binoculars/osf.io,RomanZWang/osf.io,danielneis/osf.io,Nesiehr/osf.io,KAsante95/osf.io,baylee-d/osf.io,billyhunt/osf.io,adlius/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,erinspace/osf.io,cwisecarver/osf.io,samanehsan/osf.io,cslzchen/osf.io,chrisseto/osf.io,caneruguz/osf.io,mluke93/osf.io,jnayak1/osf.io,chrisseto/osf.io,alexschiller/osf.io,abought/osf.io,caseyrygt/osf.io,icereval/osf.io,mluo613/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,icereval/osf.io,laurenrevere/osf.io,felliott/osf.io,acshi/osf.io,kwierman/osf.io,RomanZWang/osf.io,abought/osf.io,acshi/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,rdhyee/osf.io,crcresearch/osf.io,SSJohns/osf.io,emetsger/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,leb2dg/osf.io,mluke93/osf.io,samanehsan/osf.io,samchrisinger/osf.io,danielneis/osf.io,danielneis/osf.io,amyshi188/osf.io,hmoco/osf.io,doublebits/osf.io,aaxelb/osf.io,rdhyee/osf.io,kch8qx/osf.io,laurenrevere/osf.io,erinspace/osf.io,samchrisinger/osf.io,kwierman/osf.io,KAsante95/osf.io,adlius/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,felliott/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,chennan47/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,doublebits/osf.io,Nesiehr/osf.io,jnayak1/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,leb2dg/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,caseyrygt/osf.io
from django.conf.urls import url from api.nodes import views urlpatterns = [ # Examples: # url(r'^$', 'api.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^$', views.NodeList.as_view(), name='node-list'), + url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'), url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'), url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'), url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'), url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ]
Add second delete url where users will send request to confirm they want to bulk delete.
## Code Before: from django.conf.urls import url from api.nodes import views urlpatterns = [ # Examples: # url(r'^$', 'api.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^$', views.NodeList.as_view(), name='node-list'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'), url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'), url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'), url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'), url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ] ## Instruction: Add second delete url where users will send request to confirm they want to bulk delete. ## Code After: from django.conf.urls import url from api.nodes import views urlpatterns = [ # Examples: # url(r'^$', 'api.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^$', views.NodeList.as_view(), name='node-list'), url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'), url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'), url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'), url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'), url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'), url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'), ]
# ... existing code ... url(r'^$', views.NodeList.as_view(), name='node-list'), url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'), url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'), # ... rest of the code ...
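The new route's regex can be checked outside Django; the named group captures the confirmation token exactly as the URLconf entry does:

import re

route = re.compile(r'^bulk_delete/(?P<confirmation_token>\w+)/$')
match = route.match('bulk_delete/a1b2c3/')
print(match.group('confirmation_token'))   # a1b2c3
print(route.match('bulk_delete//'))        # None: an empty token never matches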
21d45e38d07a413aeeb19e10a68e540d1f6d5851
core/forms.py
core/forms.py
from core import settings as stCore from django import forms from django.conf import settings as st from django.contrib.flatpages.admin import FlatpageForm from django.contrib.sites.models import Site from django.forms.widgets import HiddenInput, MultipleHiddenInput class PageForm(FlatpageForm): url = forms.CharField(label='', max_length=100, required=False) sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(), required=False, label='') def __init__(self, *args, **kwargs): super(FlatpageForm, self).__init__(*args, **kwargs) self.fields['url'].initial = stCore.BASE_URL_FLATPAGES self.fields['url'].widget = HiddenInput() self.fields['sites'].widget = MultipleHiddenInput() def clean_url(self): return True def save(self, commit=True): flatpage = super(PageForm, self).save(commit=False) flatpage.save() flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/' flatpage.sites.add(Site.objects.get(id=st.SITE_ID)) return flatpage class Meta: widgets = { 'content': forms.widgets.Textarea(), } class Media: js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
from core import settings as stCore from django import forms from django.conf import settings as st from flatpages_i18n.forms import FlatpageForm from django.contrib.sites.models import Site from django.forms.widgets import HiddenInput, MultipleHiddenInput class PageForm(FlatpageForm): url = forms.CharField(label='', max_length=100, required=False) sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(), required=False, label='') def __init__(self, *args, **kwargs): super(FlatpageForm, self).__init__(*args, **kwargs) self.fields['url'].initial = stCore.BASE_URL_FLATPAGES self.fields['url'].widget = HiddenInput() self.fields['sites'].widget = MultipleHiddenInput() def clean_url(self): return True def save(self, commit=True): flatpage = super(PageForm, self).save(commit=False) flatpage.save() flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/' flatpage.sites.add(Site.objects.get(id=st.SITE_ID)) return flatpage class Meta: widgets = { 'content': forms.widgets.Textarea(), } class Media: js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
Remove last references to flatpage so it doesnt show up on admin page
Remove last references to flatpage so it doesnt show up on admin page
Python
agpl-3.0
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
from core import settings as stCore from django import forms from django.conf import settings as st - from django.contrib.flatpages.admin import FlatpageForm + from flatpages_i18n.forms import FlatpageForm from django.contrib.sites.models import Site from django.forms.widgets import HiddenInput, MultipleHiddenInput class PageForm(FlatpageForm): url = forms.CharField(label='', max_length=100, required=False) sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(), required=False, label='') def __init__(self, *args, **kwargs): super(FlatpageForm, self).__init__(*args, **kwargs) self.fields['url'].initial = stCore.BASE_URL_FLATPAGES self.fields['url'].widget = HiddenInput() self.fields['sites'].widget = MultipleHiddenInput() def clean_url(self): return True def save(self, commit=True): flatpage = super(PageForm, self).save(commit=False) flatpage.save() flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/' flatpage.sites.add(Site.objects.get(id=st.SITE_ID)) return flatpage class Meta: widgets = { 'content': forms.widgets.Textarea(), } class Media: js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
Remove last references to flatpage so it doesnt show up on admin page
## Code Before: from core import settings as stCore from django import forms from django.conf import settings as st from django.contrib.flatpages.admin import FlatpageForm from django.contrib.sites.models import Site from django.forms.widgets import HiddenInput, MultipleHiddenInput class PageForm(FlatpageForm): url = forms.CharField(label='', max_length=100, required=False) sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(), required=False, label='') def __init__(self, *args, **kwargs): super(FlatpageForm, self).__init__(*args, **kwargs) self.fields['url'].initial = stCore.BASE_URL_FLATPAGES self.fields['url'].widget = HiddenInput() self.fields['sites'].widget = MultipleHiddenInput() def clean_url(self): return True def save(self, commit=True): flatpage = super(PageForm, self).save(commit=False) flatpage.save() flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/' flatpage.sites.add(Site.objects.get(id=st.SITE_ID)) return flatpage class Meta: widgets = { 'content': forms.widgets.Textarea(), } class Media: js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA) ## Instruction: Remove last references to flatpage so it doesnt show up on admin page ## Code After: from core import settings as stCore from django import forms from django.conf import settings as st from flatpages_i18n.forms import FlatpageForm from django.contrib.sites.models import Site from django.forms.widgets import HiddenInput, MultipleHiddenInput class PageForm(FlatpageForm): url = forms.CharField(label='', max_length=100, required=False) sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(), required=False, label='') def __init__(self, *args, **kwargs): super(FlatpageForm, self).__init__(*args, **kwargs) self.fields['url'].initial = stCore.BASE_URL_FLATPAGES self.fields['url'].widget = HiddenInput() self.fields['sites'].widget = MultipleHiddenInput() def clean_url(self): return True def save(self, commit=True): flatpage = super(PageForm, self).save(commit=False) flatpage.save() flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/' flatpage.sites.add(Site.objects.get(id=st.SITE_ID)) return flatpage class Meta: widgets = { 'content': forms.widgets.Textarea(), } class Media: js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
// ... existing code ... from django.conf import settings as st from flatpages_i18n.forms import FlatpageForm from django.contrib.sites.models import Site // ... rest of the code ...
32a3ab4f677086916bdba6e7a8be41c9e62d7da0
appengine_config.py
appengine_config.py
"""Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Declare the Django version we need. from google.appengine.dist import use_library use_library('django', '1.2') # Fail early if we can't import Django 1.x. Log identifying information. import django logging.info('django.__file__ = %r, django.VERSION = %r', django.__file__, django.VERSION) assert django.VERSION[0] >= 1, "This Django version is too old" # Custom Django configuration. # NOTE: All "main" scripts must import webapp.template before django. os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings settings._target = None
"""Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if '/diff/' in path: return '/X/diff/...' if '/diff2/' in path: return '/X/diff2/...' if '/patch/' in path: return '/X/patch/...' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Declare the Django version we need. from google.appengine.dist import use_library use_library('django', '1.2') # Fail early if we can't import Django 1.x. Log identifying information. import django logging.info('django.__file__ = %r, django.VERSION = %r', django.__file__, django.VERSION) assert django.VERSION[0] >= 1, "This Django version is too old" # Custom Django configuration. # NOTE: All "main" scripts must import webapp.template before django. os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings settings._target = None
Improve custom appstats path normalization.
Improve custom appstats path normalization.
Python
apache-2.0
ligthyear/quick-check,ligthyear/quick-check
"""Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' + if '/diff/' in path: + return '/X/diff/...' + if '/diff2/' in path: + return '/X/diff2/...' + if '/patch/' in path: + return '/X/patch/...' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Declare the Django version we need. from google.appengine.dist import use_library use_library('django', '1.2') # Fail early if we can't import Django 1.x. Log identifying information. import django logging.info('django.__file__ = %r, django.VERSION = %r', django.__file__, django.VERSION) assert django.VERSION[0] >= 1, "This Django version is too old" # Custom Django configuration. # NOTE: All "main" scripts must import webapp.template before django. os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings settings._target = None
Improve custom appstats path normalization.
## Code Before: """Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Declare the Django version we need. from google.appengine.dist import use_library use_library('django', '1.2') # Fail early if we can't import Django 1.x. Log identifying information. import django logging.info('django.__file__ = %r, django.VERSION = %r', django.__file__, django.VERSION) assert django.VERSION[0] >= 1, "This Django version is too old" # Custom Django configuration. # NOTE: All "main" scripts must import webapp.template before django. os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings settings._target = None ## Instruction: Improve custom appstats path normalization. ## Code After: """Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if '/diff/' in path: return '/X/diff/...' if '/diff2/' in path: return '/X/diff2/...' if '/patch/' in path: return '/X/patch/...' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Declare the Django version we need. from google.appengine.dist import use_library use_library('django', '1.2') # Fail early if we can't import Django 1.x. Log identifying information. import django logging.info('django.__file__ = %r, django.VERSION = %r', django.__file__, django.VERSION) assert django.VERSION[0] >= 1, "This Django version is too old" # Custom Django configuration. # NOTE: All "main" scripts must import webapp.template before django. os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' from django.conf import settings settings._target = None
# ... existing code ... return '/user_popup/X' if '/diff/' in path: return '/X/diff/...' if '/diff2/' in path: return '/X/diff2/...' if '/patch/' in path: return '/X/patch/...' if path.startswith('/rss/'): # ... rest of the code ...
2a7d28573d1e4f07250da1d30209304fdb6de90d
sqlobject/tests/test_blob.py
sqlobject/tests/test_blob.py
import pytest from sqlobject import BLOBCol, SQLObject from sqlobject.compat import PY2 from sqlobject.tests.dbtest import setupClass, supports ######################################## # BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default=b'emptydata', length=256) def test_BLOBCol(): if not supports('blobData'): pytest.skip("blobData isn't supported") setupClass(ImageData) if PY2: data = ''.join([chr(x) for x in range(256)]) else: data = bytes(range(256)) prof = ImageData() prof.image = data iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data ImageData(image='string') assert ImageData.selectBy(image='string').count() == 1
import pytest from sqlobject import BLOBCol, SQLObject from sqlobject.compat import PY2 from sqlobject.tests.dbtest import setupClass, supports ######################################## # BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default=b'emptydata', length=256) def test_BLOBCol(): if not supports('blobData'): pytest.skip("blobData isn't supported") setupClass(ImageData) if PY2: data = ''.join([chr(x) for x in range(256)]) else: data = bytes(range(256)) prof = ImageData(image=data) iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data ImageData(image=b'string') assert ImageData.selectBy(image=b'string').count() == 1
Use byte string for test
Tests(blob): Use byte string for test
Python
lgpl-2.1
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
import pytest from sqlobject import BLOBCol, SQLObject from sqlobject.compat import PY2 from sqlobject.tests.dbtest import setupClass, supports ######################################## # BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default=b'emptydata', length=256) def test_BLOBCol(): if not supports('blobData'): pytest.skip("blobData isn't supported") setupClass(ImageData) if PY2: data = ''.join([chr(x) for x in range(256)]) else: data = bytes(range(256)) - prof = ImageData() + prof = ImageData(image=data) - prof.image = data iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data - ImageData(image='string') + ImageData(image=b'string') - assert ImageData.selectBy(image='string').count() == 1 + assert ImageData.selectBy(image=b'string').count() == 1
Use byte string for test
## Code Before: import pytest from sqlobject import BLOBCol, SQLObject from sqlobject.compat import PY2 from sqlobject.tests.dbtest import setupClass, supports ######################################## # BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default=b'emptydata', length=256) def test_BLOBCol(): if not supports('blobData'): pytest.skip("blobData isn't supported") setupClass(ImageData) if PY2: data = ''.join([chr(x) for x in range(256)]) else: data = bytes(range(256)) prof = ImageData() prof.image = data iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data ImageData(image='string') assert ImageData.selectBy(image='string').count() == 1 ## Instruction: Use byte string for test ## Code After: import pytest from sqlobject import BLOBCol, SQLObject from sqlobject.compat import PY2 from sqlobject.tests.dbtest import setupClass, supports ######################################## # BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default=b'emptydata', length=256) def test_BLOBCol(): if not supports('blobData'): pytest.skip("blobData isn't supported") setupClass(ImageData) if PY2: data = ''.join([chr(x) for x in range(256)]) else: data = bytes(range(256)) prof = ImageData(image=data) iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data ImageData(image=b'string') assert ImageData.selectBy(image=b'string').count() == 1
# ... existing code ... prof = ImageData(image=data) iid = prof.id # ... modified code ... ImageData(image=b'string') assert ImageData.selectBy(image=b'string').count() == 1 # ... rest of the code ...
540bfff4a0622c3d9a001c09f0c39e65b29e1a0c
mrbelvedereci/build/management/commands/metaci_scheduled_jobs.py
mrbelvedereci/build/management/commands/metaci_scheduled_jobs.py
from django.utils import timezone from django.core.management.base import BaseCommand, CommandError from scheduler.models import RepeatableJob class Command(BaseCommand): help = 'Returns the API token for a given username. If one does not exist, a token is first created.' def handle(self, *args, **options): job, created = RepeatableJob.objects.get_or_create( callable = 'mrbelvedereci.build.tasks.check_waiting_builds', enabled = True, name = 'check_waiting_builds', queue = 'short', defaults={ 'interval': 1, 'interval_unit': 'minutes', 'scheduled_time': timezone.now(), } ) if created: self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds')) else: self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds already exists'))
from django.utils import timezone from django.core.management.base import BaseCommand, CommandError from scheduler.models import RepeatableJob class Command(BaseCommand): help = 'Returns the API token for a given username. If one does not exist, a token is first created.' def handle(self, *args, **options): job, created = RepeatableJob.objects.get_or_create( callable = 'mrbelvedereci.build.tasks.check_waiting_builds', enabled = True, name = 'check_waiting_builds', queue = 'short', defaults={ 'interval': 1, 'interval_unit': 'minutes', 'scheduled_time': timezone.now(), } ) if created: self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds with id {}'.format(job.id))) else: self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds with id {} already exists and is {}.'.format(job.id, 'enabled' if job.enabled else 'disabled')))
Add job id and enabled/disabled status if the job already exists
Add job id and enabled/disabled status if the job already exists
Python
bsd-3-clause
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
from django.utils import timezone from django.core.management.base import BaseCommand, CommandError from scheduler.models import RepeatableJob class Command(BaseCommand): help = 'Returns the API token for a given username. If one does not exist, a token is first created.' def handle(self, *args, **options): job, created = RepeatableJob.objects.get_or_create( callable = 'mrbelvedereci.build.tasks.check_waiting_builds', enabled = True, name = 'check_waiting_builds', queue = 'short', defaults={ 'interval': 1, 'interval_unit': 'minutes', 'scheduled_time': timezone.now(), } ) if created: - self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds')) + self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds with id {}'.format(job.id))) else: - self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds already exists')) + self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds with id {} already exists and is {}.'.format(job.id, 'enabled' if job.enabled else 'disabled')))
Add job id and enabled/disabled status if the job already exists
## Code Before: from django.utils import timezone from django.core.management.base import BaseCommand, CommandError from scheduler.models import RepeatableJob class Command(BaseCommand): help = 'Returns the API token for a given username. If one does not exist, a token is first created.' def handle(self, *args, **options): job, created = RepeatableJob.objects.get_or_create( callable = 'mrbelvedereci.build.tasks.check_waiting_builds', enabled = True, name = 'check_waiting_builds', queue = 'short', defaults={ 'interval': 1, 'interval_unit': 'minutes', 'scheduled_time': timezone.now(), } ) if created: self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds')) else: self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds already exists')) ## Instruction: Add job id and enabled/disabled status if the job already exists ## Code After: from django.utils import timezone from django.core.management.base import BaseCommand, CommandError from scheduler.models import RepeatableJob class Command(BaseCommand): help = 'Returns the API token for a given username. If one does not exist, a token is first created.' def handle(self, *args, **options): job, created = RepeatableJob.objects.get_or_create( callable = 'mrbelvedereci.build.tasks.check_waiting_builds', enabled = True, name = 'check_waiting_builds', queue = 'short', defaults={ 'interval': 1, 'interval_unit': 'minutes', 'scheduled_time': timezone.now(), } ) if created: self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds with id {}'.format(job.id))) else: self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds with id {} already exists and is {}.'.format(job.id, 'enabled' if job.enabled else 'disabled')))
// ... existing code ... if created: self.stdout.write(self.style.SUCCESS('Created job check_waiting_builds with id {}'.format(job.id))) else: self.stdout.write(self.style.SUCCESS('Scheduled job check_waiting_builds with id {} already exists and is {}.'.format(job.id, 'enabled' if job.enabled else 'disabled'))) // ... rest of the code ...
9fa95b373c2b43c6e0852aff82ec4c31821a7742
scss/tests/test_files.py
scss/tests/test_files.py
from __future__ import absolute_import import glob import os.path import logging import pytest from scss import Scss console = logging.StreamHandler() logger = logging.getLogger('scss') logger.setLevel(logging.ERROR) logger.addHandler(console) def test_pair_programmatic(scss_file_pair): scss_fn, css_fn, pytest_trigger = scss_file_pair if pytest_trigger: pytest_trigger() with open(scss_fn) as fh: source = fh.read() with open(css_fn) as fh: expected = fh.read() directory, _ = os.path.split(scss_fn) include_dir = os.path.join(directory, 'include') compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # Normalize leading and trailing newlines actual = actual.strip('\n') expected = expected.strip('\n') assert expected == actual
from __future__ import absolute_import import glob import os.path import logging import pytest import scss console = logging.StreamHandler() logger = logging.getLogger('scss') logger.setLevel(logging.ERROR) logger.addHandler(console) def test_pair_programmatic(scss_file_pair): scss_fn, css_fn, pytest_trigger = scss_file_pair if pytest_trigger: pytest_trigger() with open(scss_fn) as fh: source = fh.read() with open(css_fn) as fh: expected = fh.read() directory, _ = os.path.split(scss_fn) include_dir = os.path.join(directory, 'include') scss.config.STATIC_ROOT = os.path.join(directory, 'static') compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # Normalize leading and trailing newlines actual = actual.strip('\n') expected = expected.strip('\n') assert expected == actual
Add static root path to tests
Add static root path to tests
Python
mit
Kronuz/pyScss,hashamali/pyScss,cpfair/pyScss,cpfair/pyScss,cpfair/pyScss,Kronuz/pyScss,hashamali/pyScss,hashamali/pyScss,Kronuz/pyScss,Kronuz/pyScss
from __future__ import absolute_import import glob import os.path import logging import pytest - from scss import Scss + import scss console = logging.StreamHandler() logger = logging.getLogger('scss') logger.setLevel(logging.ERROR) logger.addHandler(console) def test_pair_programmatic(scss_file_pair): scss_fn, css_fn, pytest_trigger = scss_file_pair if pytest_trigger: pytest_trigger() with open(scss_fn) as fh: source = fh.read() with open(css_fn) as fh: expected = fh.read() directory, _ = os.path.split(scss_fn) include_dir = os.path.join(directory, 'include') + scss.config.STATIC_ROOT = os.path.join(directory, 'static') - compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) + compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # Normalize leading and trailing newlines actual = actual.strip('\n') expected = expected.strip('\n') assert expected == actual
Add static root path to tests
## Code Before: from __future__ import absolute_import import glob import os.path import logging import pytest from scss import Scss console = logging.StreamHandler() logger = logging.getLogger('scss') logger.setLevel(logging.ERROR) logger.addHandler(console) def test_pair_programmatic(scss_file_pair): scss_fn, css_fn, pytest_trigger = scss_file_pair if pytest_trigger: pytest_trigger() with open(scss_fn) as fh: source = fh.read() with open(css_fn) as fh: expected = fh.read() directory, _ = os.path.split(scss_fn) include_dir = os.path.join(directory, 'include') compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # Normalize leading and trailing newlines actual = actual.strip('\n') expected = expected.strip('\n') assert expected == actual ## Instruction: Add static root path to tests ## Code After: from __future__ import absolute_import import glob import os.path import logging import pytest import scss console = logging.StreamHandler() logger = logging.getLogger('scss') logger.setLevel(logging.ERROR) logger.addHandler(console) def test_pair_programmatic(scss_file_pair): scss_fn, css_fn, pytest_trigger = scss_file_pair if pytest_trigger: pytest_trigger() with open(scss_fn) as fh: source = fh.read() with open(css_fn) as fh: expected = fh.read() directory, _ = os.path.split(scss_fn) include_dir = os.path.join(directory, 'include') scss.config.STATIC_ROOT = os.path.join(directory, 'static') compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # Normalize leading and trailing newlines actual = actual.strip('\n') expected = expected.strip('\n') assert expected == actual
# ... existing code ... import scss # ... modified code ... include_dir = os.path.join(directory, 'include') scss.config.STATIC_ROOT = os.path.join(directory, 'static') compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir]) actual = compiler.compile(source) # ... rest of the code ...
3555b002aae386220bc02d662a9b188426afc08f
cmsplugin_facebook/cms_plugins.py
cmsplugin_facebook/cms_plugins.py
from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from cmsplugin_facebook import models class BasePlugin(CMSPluginBase): name = None def render(self, context, instance, placeholder): context.update({'instance': instance, 'name': self.name, 'url': instance.pageurl or \ context['request'].build_absolute_uri()}) return context class FacebookLikeBoxPlugin(BasePlugin): model = models.FacebookLikeBox name = 'Facebook Like Box' render_template = 'cmsplugin_facebook/likebox.html' change_form_template = 'cmsplugin_facebook/likebox_change_form.html' class FacebookLikeButtonPlugin(BasePlugin): model = models.FacebookLikeButton name = 'Facebook Like Button' render_template = 'cmsplugin_facebook/likebutton.html' change_form_template = 'cmsplugin_facebook/likebutton_change_form.html' plugin_pool.register_plugin(FacebookLikeBoxPlugin) plugin_pool.register_plugin(FacebookLikeButtonPlugin)
from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from cmsplugin_facebook import models class BasePlugin(CMSPluginBase): name = None def render(self, context, instance, placeholder): context.update({'instance': instance, 'name': self.name, 'url': instance.pageurl or \ context['request'].build_absolute_uri()}) return context class FacebookLikeBoxPlugin(BasePlugin): model = models.FacebookLikeBox name = 'Facebook Like Box' module = 'Facebook' render_template = 'cmsplugin_facebook/likebox.html' change_form_template = 'cmsplugin_facebook/likebox_change_form.html' class FacebookLikeButtonPlugin(BasePlugin): model = models.FacebookLikeButton name = 'Facebook Like Button' module = 'Facebook' render_template = 'cmsplugin_facebook/likebutton.html' change_form_template = 'cmsplugin_facebook/likebutton_change_form.html' plugin_pool.register_plugin(FacebookLikeBoxPlugin) plugin_pool.register_plugin(FacebookLikeButtonPlugin)
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
Python
bsd-3-clause
chrisglass/cmsplugin_facebook
from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from cmsplugin_facebook import models class BasePlugin(CMSPluginBase): name = None def render(self, context, instance, placeholder): context.update({'instance': instance, 'name': self.name, 'url': instance.pageurl or \ context['request'].build_absolute_uri()}) return context class FacebookLikeBoxPlugin(BasePlugin): model = models.FacebookLikeBox name = 'Facebook Like Box' + module = 'Facebook' render_template = 'cmsplugin_facebook/likebox.html' change_form_template = 'cmsplugin_facebook/likebox_change_form.html' class FacebookLikeButtonPlugin(BasePlugin): model = models.FacebookLikeButton name = 'Facebook Like Button' + module = 'Facebook' render_template = 'cmsplugin_facebook/likebutton.html' change_form_template = 'cmsplugin_facebook/likebutton_change_form.html' plugin_pool.register_plugin(FacebookLikeBoxPlugin) plugin_pool.register_plugin(FacebookLikeButtonPlugin)
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
## Code Before: from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from cmsplugin_facebook import models class BasePlugin(CMSPluginBase): name = None def render(self, context, instance, placeholder): context.update({'instance': instance, 'name': self.name, 'url': instance.pageurl or \ context['request'].build_absolute_uri()}) return context class FacebookLikeBoxPlugin(BasePlugin): model = models.FacebookLikeBox name = 'Facebook Like Box' render_template = 'cmsplugin_facebook/likebox.html' change_form_template = 'cmsplugin_facebook/likebox_change_form.html' class FacebookLikeButtonPlugin(BasePlugin): model = models.FacebookLikeButton name = 'Facebook Like Button' render_template = 'cmsplugin_facebook/likebutton.html' change_form_template = 'cmsplugin_facebook/likebutton_change_form.html' plugin_pool.register_plugin(FacebookLikeBoxPlugin) plugin_pool.register_plugin(FacebookLikeButtonPlugin) ## Instruction: Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins. ## Code After: from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from cmsplugin_facebook import models class BasePlugin(CMSPluginBase): name = None def render(self, context, instance, placeholder): context.update({'instance': instance, 'name': self.name, 'url': instance.pageurl or \ context['request'].build_absolute_uri()}) return context class FacebookLikeBoxPlugin(BasePlugin): model = models.FacebookLikeBox name = 'Facebook Like Box' module = 'Facebook' render_template = 'cmsplugin_facebook/likebox.html' change_form_template = 'cmsplugin_facebook/likebox_change_form.html' class FacebookLikeButtonPlugin(BasePlugin): model = models.FacebookLikeButton name = 'Facebook Like Button' module = 'Facebook' render_template = 'cmsplugin_facebook/likebutton.html' change_form_template = 'cmsplugin_facebook/likebutton_change_form.html' plugin_pool.register_plugin(FacebookLikeBoxPlugin) plugin_pool.register_plugin(FacebookLikeButtonPlugin)
# ... existing code ... name = 'Facebook Like Box' module = 'Facebook' render_template = 'cmsplugin_facebook/likebox.html' # ... modified code ... name = 'Facebook Like Button' module = 'Facebook' render_template = 'cmsplugin_facebook/likebutton.html' # ... rest of the code ...
d29410b39af1165ba520e7ecad7e6e9c36a7fd2f
test/test_basic.py
test/test_basic.py
import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker as Key Key = Key() def test_google_average(): result = searchcolor.google_average('Death', 10, Key.api(), Key.cse()) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255
import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker from api_keys import BingKeyLocker from api_keys import MSCSKeyLocker GKL = GoogleKeyLocker() BKL = BingKeyLocker() MSCSKL = MSCSKeyLocker() def test_google_average(): result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_bing_average(): result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_mscs_average(): result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255
Add tests for bing and mscs
Add tests for bing and mscs
Python
mit
Tathorack/searchcolor,Tathorack/searchcolor
import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor - from api_keys import GoogleKeyLocker as Key + from api_keys import GoogleKeyLocker + from api_keys import BingKeyLocker + from api_keys import MSCSKeyLocker - Key = Key() + GKL = GoogleKeyLocker() + BKL = BingKeyLocker() + MSCSKL = MSCSKeyLocker() def test_google_average(): - result = searchcolor.google_average('Death', 10, Key.api(), Key.cse()) + result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 + def test_bing_average(): + result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8) + assert result.get('name') == 'Death' + assert result.get('red') >= 0 and result.get('red') <= 255 + assert result.get('green') >= 0 and result.get('green') <= 255 + assert result.get('blue') >= 0 and result.get('blue') <= 255 + + def test_mscs_average(): + result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8) + assert result.get('name') == 'Death' + assert result.get('red') >= 0 and result.get('red') <= 255 + assert result.get('green') >= 0 and result.get('green') <= 255 + assert result.get('blue') >= 0 and result.get('blue') <= 255 +
Add tests for bing and mscs
## Code Before: import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker as Key Key = Key() def test_google_average(): result = searchcolor.google_average('Death', 10, Key.api(), Key.cse()) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 ## Instruction: Add tests for bing and mscs ## Code After: import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker from api_keys import BingKeyLocker from api_keys import MSCSKeyLocker GKL = GoogleKeyLocker() BKL = BingKeyLocker() MSCSKL = MSCSKeyLocker() def test_google_average(): result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_bing_average(): result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_mscs_average(): result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255
... from api_keys import GoogleKeyLocker from api_keys import BingKeyLocker from api_keys import MSCSKeyLocker GKL = GoogleKeyLocker() BKL = BingKeyLocker() MSCSKL = MSCSKeyLocker() ... def test_google_average(): result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8) assert result.get('name') == 'Death' ... assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_bing_average(): result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_mscs_average(): result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 ...
ebc4acb745287762cc8cb0a18fb97ed3e01c9ab0
mkerefuse/util.py
mkerefuse/util.py
from lxml import html class XPathObject(object): input_properties = {} """Dict of keys (property names) and XPaths (to read vals from)""" @classmethod def FromHTML(cls, html_contents): inst = cls() print("Reading through {b} bytes for {c} properties...".format( b=len(html_contents), c=len(cls.input_properties))) tree = html.fromstring(html_contents) for attr_name, xpath in cls.input_properties.items(): print("Searching for '{n}': {x}".format( n=attr_name, x=xpath)) elements = tree.xpath(xpath) if not len(elements): print("Failed to find '{n}': {x}".format( n=attr_name, x=xpath)) continue setattr( inst, attr_name, elements[0].text) return inst def __repr__(self): return json.dumps( self.__dict__, indent=4, separators=(',', ': '))
import json from lxml import html class XPathObject(object): input_properties = {} """Dict of keys (property names) and XPaths (to read vals from)""" @classmethod def FromHTML(cls, html_contents): inst = cls() print("Reading through {b} bytes for {c} properties...".format( b=len(html_contents), c=len(cls.input_properties))) tree = html.fromstring(html_contents) for attr_name, xpath in cls.input_properties.items(): print("Searching for '{n}': {x}".format( n=attr_name, x=xpath)) elements = tree.xpath(xpath) if not len(elements): print("Failed to find '{n}': {x}".format( n=attr_name, x=xpath)) continue setattr( inst, attr_name, elements[0].text) return inst def __repr__(self): return json.dumps( self.__dict__, indent=4, separators=(',', ': '))
Add json library for repr() calls
Add json library for repr() calls
Python
unlicense
tomislacker/python-mke-trash-pickup,tomislacker/python-mke-trash-pickup
+ import json from lxml import html class XPathObject(object): input_properties = {} """Dict of keys (property names) and XPaths (to read vals from)""" @classmethod def FromHTML(cls, html_contents): inst = cls() print("Reading through {b} bytes for {c} properties...".format( b=len(html_contents), c=len(cls.input_properties))) tree = html.fromstring(html_contents) for attr_name, xpath in cls.input_properties.items(): print("Searching for '{n}': {x}".format( n=attr_name, x=xpath)) elements = tree.xpath(xpath) if not len(elements): print("Failed to find '{n}': {x}".format( n=attr_name, x=xpath)) continue setattr( inst, attr_name, elements[0].text) return inst def __repr__(self): return json.dumps( self.__dict__, indent=4, separators=(',', ': '))
Add json library for repr() calls
## Code Before: from lxml import html class XPathObject(object): input_properties = {} """Dict of keys (property names) and XPaths (to read vals from)""" @classmethod def FromHTML(cls, html_contents): inst = cls() print("Reading through {b} bytes for {c} properties...".format( b=len(html_contents), c=len(cls.input_properties))) tree = html.fromstring(html_contents) for attr_name, xpath in cls.input_properties.items(): print("Searching for '{n}': {x}".format( n=attr_name, x=xpath)) elements = tree.xpath(xpath) if not len(elements): print("Failed to find '{n}': {x}".format( n=attr_name, x=xpath)) continue setattr( inst, attr_name, elements[0].text) return inst def __repr__(self): return json.dumps( self.__dict__, indent=4, separators=(',', ': ')) ## Instruction: Add json library for repr() calls ## Code After: import json from lxml import html class XPathObject(object): input_properties = {} """Dict of keys (property names) and XPaths (to read vals from)""" @classmethod def FromHTML(cls, html_contents): inst = cls() print("Reading through {b} bytes for {c} properties...".format( b=len(html_contents), c=len(cls.input_properties))) tree = html.fromstring(html_contents) for attr_name, xpath in cls.input_properties.items(): print("Searching for '{n}': {x}".format( n=attr_name, x=xpath)) elements = tree.xpath(xpath) if not len(elements): print("Failed to find '{n}': {x}".format( n=attr_name, x=xpath)) continue setattr( inst, attr_name, elements[0].text) return inst def __repr__(self): return json.dumps( self.__dict__, indent=4, separators=(',', ': '))
// ... existing code ... import json from lxml import html // ... rest of the code ...
94861438189537b88deaf8d04cc9942192038d8c
user_messages/views.py
user_messages/views.py
from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
Update the read status of a thread when it's viewed
Update the read status of a thread when it's viewed
Python
mit
eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,arthur-wsw/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages
from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) + if request.user == thread.to_user: + thread.to_user_unread = False + else: + thread.from_user_unread = False + thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request)) - -
Update the read status of a thread when it's viewed
## Code Before: from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request)) ## Instruction: Update the read status of a thread when it's viewed ## Code After: from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
// ... existing code ... thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request)) // ... rest of the code ...
8365945ef62b8f9cd37022302e8ee6299716720d
masterfirefoxos/base/helpers.py
masterfirefoxos/base/helpers.py
from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
Add static helper for jinja2
Add static helper for jinja2
Python
mpl-2.0
craigcook/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos,enng0227/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,enng0227/masterfirefoxos,mozilla/masterfirefoxos,glogiotatidis/masterfirefoxos,glogiotatidis/masterfirefoxos,liu21st/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,craigcook/masterfirefoxos,liu21st/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos
+ from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup + + + static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
Add static helper for jinja2
## Code Before: from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request)) ## Instruction: Add static helper for jinja2 ## Code After: from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
... from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region ... static = register.function(static_helper) @register.function ...
32b51cb7d63d9d122c0d678a46d56a735a9bea3e
dodo_commands/framework/decorator_scope.py
dodo_commands/framework/decorator_scope.py
from dodo_commands.framework.singleton import Dodo # Resp: add the current command_name # to the list of commands decorated by decorator_name. class DecoratorScope: def __init__(self, decorator_name): self.decorators = Dodo.get_config('/ROOT').setdefault( 'decorators', {}).setdefault(decorator_name, []) def __enter__(self): # noqa self.decorators.append(Dodo.command_name) def __exit__(self, type, value, traceback): # noqa self.decorators.remove(Dodo.command_name)
from dodo_commands.framework.singleton import Dodo # Resp: add the current command_name # to the list of commands decorated by decorator_name. class DecoratorScope: def __init__(self, decorator_name, remove=False): self.decorators = Dodo.get_config('/ROOT').setdefault( 'decorators', {}).setdefault(decorator_name, []) self.prefix = "!" if remove else "" def __enter__(self): # noqa self.decorators.append(self.prefix + Dodo.command_name) def __exit__(self, type, value, traceback): # noqa self.decorators.remove(self.prefix + Dodo.command_name)
Add ``remove`` flag to DecoratorScope
Add ``remove`` flag to DecoratorScope
Python
mit
mnieber/dodo_commands
from dodo_commands.framework.singleton import Dodo # Resp: add the current command_name # to the list of commands decorated by decorator_name. class DecoratorScope: - def __init__(self, decorator_name): + def __init__(self, decorator_name, remove=False): self.decorators = Dodo.get_config('/ROOT').setdefault( 'decorators', {}).setdefault(decorator_name, []) + self.prefix = "!" if remove else "" def __enter__(self): # noqa - self.decorators.append(Dodo.command_name) + self.decorators.append(self.prefix + Dodo.command_name) def __exit__(self, type, value, traceback): # noqa - self.decorators.remove(Dodo.command_name) + self.decorators.remove(self.prefix + Dodo.command_name)
Add ``remove`` flag to DecoratorScope
## Code Before: from dodo_commands.framework.singleton import Dodo # Resp: add the current command_name # to the list of commands decorated by decorator_name. class DecoratorScope: def __init__(self, decorator_name): self.decorators = Dodo.get_config('/ROOT').setdefault( 'decorators', {}).setdefault(decorator_name, []) def __enter__(self): # noqa self.decorators.append(Dodo.command_name) def __exit__(self, type, value, traceback): # noqa self.decorators.remove(Dodo.command_name) ## Instruction: Add ``remove`` flag to DecoratorScope ## Code After: from dodo_commands.framework.singleton import Dodo # Resp: add the current command_name # to the list of commands decorated by decorator_name. class DecoratorScope: def __init__(self, decorator_name, remove=False): self.decorators = Dodo.get_config('/ROOT').setdefault( 'decorators', {}).setdefault(decorator_name, []) self.prefix = "!" if remove else "" def __enter__(self): # noqa self.decorators.append(self.prefix + Dodo.command_name) def __exit__(self, type, value, traceback): # noqa self.decorators.remove(self.prefix + Dodo.command_name)
... class DecoratorScope: def __init__(self, decorator_name, remove=False): self.decorators = Dodo.get_config('/ROOT').setdefault( ... 'decorators', {}).setdefault(decorator_name, []) self.prefix = "!" if remove else "" ... def __enter__(self): # noqa self.decorators.append(self.prefix + Dodo.command_name) ... def __exit__(self, type, value, traceback): # noqa self.decorators.remove(self.prefix + Dodo.command_name) ...
bb88b1d2e2c4d3eb482c3cf32d1a53c9e89f94cf
conftest.py
conftest.py
from __future__ import unicode_literals from django.db import connection def pytest_report_header(config): with connection.cursor() as cursor: cursor.execute("SELECT VERSION()") version = cursor.fetchone()[0] return "MySQL version: {}".format(version)
from __future__ import unicode_literals import django from django.db import connection def pytest_report_header(config): dot_version = '.'.join(str(x) for x in django.VERSION) header = "Django version: " + dot_version if hasattr(connection, '_nodb_connection'): with connection._nodb_connection.cursor() as cursor: cursor.execute("SELECT VERSION()") version = cursor.fetchone()[0] header += "\nMySQL version: {}".format(version) return header
Fix pytest version report when database does not exist, add Django version header
Fix pytest version report when database does not exist, add Django version header
Python
mit
nickmeharry/django-mysql,arnau126/django-mysql,arnau126/django-mysql,nickmeharry/django-mysql,adamchainz/django-mysql
from __future__ import unicode_literals + import django from django.db import connection def pytest_report_header(config): + dot_version = '.'.join(str(x) for x in django.VERSION) + header = "Django version: " + dot_version - with connection.cursor() as cursor: - cursor.execute("SELECT VERSION()") - version = cursor.fetchone()[0] - return "MySQL version: {}".format(version) + if hasattr(connection, '_nodb_connection'): + with connection._nodb_connection.cursor() as cursor: + cursor.execute("SELECT VERSION()") + version = cursor.fetchone()[0] + header += "\nMySQL version: {}".format(version) + + return header +
Fix pytest version report when database does not exist, add Django version header
## Code Before: from __future__ import unicode_literals from django.db import connection def pytest_report_header(config): with connection.cursor() as cursor: cursor.execute("SELECT VERSION()") version = cursor.fetchone()[0] return "MySQL version: {}".format(version) ## Instruction: Fix pytest version report when database does not exist, add Django version header ## Code After: from __future__ import unicode_literals import django from django.db import connection def pytest_report_header(config): dot_version = '.'.join(str(x) for x in django.VERSION) header = "Django version: " + dot_version if hasattr(connection, '_nodb_connection'): with connection._nodb_connection.cursor() as cursor: cursor.execute("SELECT VERSION()") version = cursor.fetchone()[0] header += "\nMySQL version: {}".format(version) return header
// ... existing code ... import django from django.db import connection // ... modified code ... def pytest_report_header(config): dot_version = '.'.join(str(x) for x in django.VERSION) header = "Django version: " + dot_version if hasattr(connection, '_nodb_connection'): with connection._nodb_connection.cursor() as cursor: cursor.execute("SELECT VERSION()") version = cursor.fetchone()[0] header += "\nMySQL version: {}".format(version) return header // ... rest of the code ...
7c0c349656e6f02be0f3f0044f5d225f3688be08
bong/parse_args.py
bong/parse_args.py
from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version=VERSION, help='Show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='Time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='Time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='Time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='Timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='Message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version='%(prog)s {}'.format(VERSION), help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
Clean up the argument parsing
Clean up the argument parsing
Python
mit
prophile/bong
from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) - PARSER.add_argument('-V', '--version', action='version', version=VERSION, + PARSER.add_argument('-V', '--version', action='version', + version='%(prog)s {}'.format(VERSION), - help='Show version') + help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, - help='Time for a Pomodoro system short break') + help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', - help='Time for a Pomodoro system long break') + help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', - help='Time for a Pomodoro system single Pomodoro') + help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', - help='Timer length, in minutes') + help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, - help='Message to display in the notifier') + help='message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
Clean up the argument parsing
## Code Before: from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version=VERSION, help='Show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='Time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='Time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='Time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='Timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='Message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message) ## Instruction: Clean up the argument parsing ## Code After: from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version='%(prog)s {}'.format(VERSION), help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
// ... existing code ... PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version='%(prog)s {}'.format(VERSION), help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, // ... modified code ... dest='minutes', default=25, help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', ... const=15, dest='minutes', help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', ... const=25, dest='minutes', help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='message to display in the notifier') // ... rest of the code ...
e9674f88660e14ce48239771b76310044fc37090
erpnext/patches/v7_0/remove_old_earning_deduction_doctypes.py
erpnext/patches/v7_0/remove_old_earning_deduction_doctypes.py
from __future__ import unicode_literals import frappe def execute(): if frappe.db.exists("DocType", "Salary Component"): for dt in ("Salary Structure Earning", "Salary Structure Deduction", "Salary Slip Earning", "Salary Slip Deduction", "Earning Type", "Deduction Type"): frappe.delete_doc("DocType", dt)
from __future__ import unicode_literals import frappe def execute(): if frappe.db.exists("DocType", "Salary Component"): for dt in ("Salary Structure Earning", "Salary Structure Deduction", "Salary Slip Earning", "Salary Slip Deduction", "Earning Type", "Deduction Type"): frappe.delete_doc("DocType", dt) for d in frappe.db.sql("""select name from `tabCustom Field` where dt in ('Salary Detail', 'Salary Component')"""): frappe.get_doc("Custom Field", d[0]).save()
Create columns for custom fields in new table Salary Detail and Component
Create columns for custom fields in new table Salary Detail and Component
Python
agpl-3.0
Aptitudetech/ERPNext,njmube/erpnext,geekroot/erpnext,geekroot/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,geekroot/erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/erpnext,indictranstech/erpnext,njmube/erpnext,njmube/erpnext,indictranstech/erpnext,gsnbng/erpnext,njmube/erpnext
from __future__ import unicode_literals import frappe def execute(): if frappe.db.exists("DocType", "Salary Component"): for dt in ("Salary Structure Earning", "Salary Structure Deduction", "Salary Slip Earning", "Salary Slip Deduction", "Earning Type", "Deduction Type"): frappe.delete_doc("DocType", dt) - + + + for d in frappe.db.sql("""select name from `tabCustom Field` + where dt in ('Salary Detail', 'Salary Component')"""): + frappe.get_doc("Custom Field", d[0]).save()
Create columns for custom fields in new table Salary Detail and Component
## Code Before: from __future__ import unicode_literals import frappe def execute(): if frappe.db.exists("DocType", "Salary Component"): for dt in ("Salary Structure Earning", "Salary Structure Deduction", "Salary Slip Earning", "Salary Slip Deduction", "Earning Type", "Deduction Type"): frappe.delete_doc("DocType", dt) ## Instruction: Create columns for custom fields in new table Salary Detail and Component ## Code After: from __future__ import unicode_literals import frappe def execute(): if frappe.db.exists("DocType", "Salary Component"): for dt in ("Salary Structure Earning", "Salary Structure Deduction", "Salary Slip Earning", "Salary Slip Deduction", "Earning Type", "Deduction Type"): frappe.delete_doc("DocType", dt) for d in frappe.db.sql("""select name from `tabCustom Field` where dt in ('Salary Detail', 'Salary Component')"""): frappe.get_doc("Custom Field", d[0]).save()
# ... existing code ... frappe.delete_doc("DocType", dt) for d in frappe.db.sql("""select name from `tabCustom Field` where dt in ('Salary Detail', 'Salary Component')"""): frappe.get_doc("Custom Field", d[0]).save() # ... rest of the code ...
384d57efa59665f0dd47c07062a8177a2eedde9a
run_tests.py
run_tests.py
import optparse import sys # Install the Python unittest2 package before you run this script. import unittest2 USAGE = """%prog SDK_PATH Run unit tests for App Engine apps. The SDK Path is probably /usr/local/google_appengine on Mac OS SDK_PATH Path to the SDK installation""" def main(sdk_path, test_pattern): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() suite = unittest2.loader.TestLoader().discover("tests", test_pattern) tests = unittest2.TextTestRunner(verbosity=2).run(suite) if tests.wasSuccessful() == True: sys.exit(0) else: sys.exit(1) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) < 1: print 'Warning: Trying default SDK path.' sdk_path = "/usr/local/google_appengine" else: sdk_path = args[0] test_pattern = "test*.py" if len(args) > 1: test_pattern = args[1] main(sdk_path, test_pattern)
import optparse import sys import warnings # Install the Python unittest2 package before you run this script. import unittest2 USAGE = """%prog SDK_PATH Run unit tests for App Engine apps. The SDK Path is probably /usr/local/google_appengine on Mac OS SDK_PATH Path to the SDK installation""" def main(sdk_path, test_pattern): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() suite = unittest2.loader.TestLoader().discover("tests", test_pattern) tests = unittest2.TextTestRunner(verbosity=2).run(suite) if tests.wasSuccessful() == True: sys.exit(0) else: sys.exit(1) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) < 1: warnings.warn('Trying default SDK path.', RuntimeWarning) sdk_path = "/usr/local/google_appengine" else: sdk_path = args[0] test_pattern = "test*.py" if len(args) > 1: test_pattern = args[1] main(sdk_path, test_pattern)
Replace print statement with `warnings.warn`.
Replace print statement with `warnings.warn`.

This also means the line does not need to be converted for Python 3 compatibility.
Python
mit
verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance
import optparse import sys + import warnings + # Install the Python unittest2 package before you run this script. import unittest2 USAGE = """%prog SDK_PATH Run unit tests for App Engine apps. The SDK Path is probably /usr/local/google_appengine on Mac OS SDK_PATH Path to the SDK installation""" def main(sdk_path, test_pattern): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() suite = unittest2.loader.TestLoader().discover("tests", test_pattern) tests = unittest2.TextTestRunner(verbosity=2).run(suite) if tests.wasSuccessful() == True: sys.exit(0) else: sys.exit(1) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) < 1: - print 'Warning: Trying default SDK path.' + warnings.warn('Trying default SDK path.', RuntimeWarning) sdk_path = "/usr/local/google_appengine" else: sdk_path = args[0] test_pattern = "test*.py" if len(args) > 1: test_pattern = args[1] main(sdk_path, test_pattern)
Replace print statement with `warnings.warn`.
## Code Before: import optparse import sys # Install the Python unittest2 package before you run this script. import unittest2 USAGE = """%prog SDK_PATH Run unit tests for App Engine apps. The SDK Path is probably /usr/local/google_appengine on Mac OS SDK_PATH Path to the SDK installation""" def main(sdk_path, test_pattern): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() suite = unittest2.loader.TestLoader().discover("tests", test_pattern) tests = unittest2.TextTestRunner(verbosity=2).run(suite) if tests.wasSuccessful() == True: sys.exit(0) else: sys.exit(1) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) < 1: print 'Warning: Trying default SDK path.' sdk_path = "/usr/local/google_appengine" else: sdk_path = args[0] test_pattern = "test*.py" if len(args) > 1: test_pattern = args[1] main(sdk_path, test_pattern) ## Instruction: Replace print statement with `warnings.warn`. ## Code After: import optparse import sys import warnings # Install the Python unittest2 package before you run this script. import unittest2 USAGE = """%prog SDK_PATH Run unit tests for App Engine apps. The SDK Path is probably /usr/local/google_appengine on Mac OS SDK_PATH Path to the SDK installation""" def main(sdk_path, test_pattern): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() suite = unittest2.loader.TestLoader().discover("tests", test_pattern) tests = unittest2.TextTestRunner(verbosity=2).run(suite) if tests.wasSuccessful() == True: sys.exit(0) else: sys.exit(1) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) < 1: warnings.warn('Trying default SDK path.', RuntimeWarning) sdk_path = "/usr/local/google_appengine" else: sdk_path = args[0] test_pattern = "test*.py" if len(args) > 1: test_pattern = args[1] main(sdk_path, test_pattern)
// ... existing code ... import sys import warnings # Install the Python unittest2 package before you run this script. // ... modified code ... if len(args) < 1: warnings.warn('Trying default SDK path.', RuntimeWarning) sdk_path = "/usr/local/google_appengine" // ... rest of the code ...
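The practical point of this record's change: `warnings.warn` is valid syntax on both Python 2 and 3 and routes through the filterable warnings machinery (stderr by default), whereas the old `print` statement was Python-2-only syntax. A runnable sketch that mirrors the fallback logic above:

import warnings

def resolve_sdk_path(args):
    # Warn instead of printing when falling back to the default path.
    if len(args) < 1:
        warnings.warn('Trying default SDK path.', RuntimeWarning)
        return '/usr/local/google_appengine'
    return args[0]

print(resolve_sdk_path([]))            # emits a RuntimeWarning, returns the default
print(resolve_sdk_path(['/opt/sdk']))  # no warning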
c3eac81cffbfbb2cc00629d6c773e7b2e985d071
cider/_lib.py
cider/_lib.py
def lazyproperty(fn): @property def _lazyproperty(self): attr = "_" + fn.__name__ if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazyproperty
from functools import wraps def lazyproperty(fn): @property @wraps(fn) def _lazyproperty(self): attr = "_" + fn.__name__ if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazyproperty
Fix lazyproperty decorator to preserve property attribute
Fix lazyproperty decorator to preserve property attribute
Python
mit
msanders/cider
+ from functools import wraps + def lazyproperty(fn): @property + @wraps(fn) def _lazyproperty(self): attr = "_" + fn.__name__ if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazyproperty
Fix lazyproperty decorator to preserve property attribute
## Code Before: def lazyproperty(fn): @property def _lazyproperty(self): attr = "_" + fn.__name__ if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazyproperty ## Instruction: Fix lazyproperty decorator to preserve property attribute ## Code After: from functools import wraps def lazyproperty(fn): @property @wraps(fn) def _lazyproperty(self): attr = "_" + fn.__name__ if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazyproperty
... from functools import wraps def lazyproperty(fn): ... @property @wraps(fn) def _lazyproperty(self): ...
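What `functools.wraps` buys in this record: the inner `_lazyproperty` now carries the decorated function's `__name__` and `__doc__`, and since `property()` takes its docstring from its getter, the resulting property keeps the original documentation as well. A self-contained demo (the `Circle` class is invented for illustration):

from functools import wraps

def lazyproperty(fn):
    @property
    @wraps(fn)
    def _lazyproperty(self):
        attr = "_" + fn.__name__
        if not hasattr(self, attr):
            setattr(self, attr, fn(self))
        return getattr(self, attr)
    return _lazyproperty

class Circle(object):
    def __init__(self, r):
        self.r = r

    @lazyproperty
    def area(self):
        """Area, computed once and then cached."""
        return 3.14159 * self.r ** 2

c = Circle(2)
print(c.area)                     # computed on first access, cached as c._area
print(Circle.area.fget.__name__)  # 'area' -- would be '_lazyproperty' without wraps
print(Circle.area.__doc__)        # the original docstring survives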
9ff92d0a437e5af08fbf996ed0e3362cbd9cf2c9
tests/instrumentdb_test.py
tests/instrumentdb_test.py
'Test the functions in the instrumentdb module.' import os.path import unittest as ut import stripeline.instrumentdb as idb class TestInstrumentDb(ut.TestCase): def test_paths(self): self.assertTrue(os.path.exists(idb.instrument_db_path())) self.assertTrue(os.path.exists(idb.focal_plane_db_file_name())) self.assertTrue(os.path.exists(idb.detector_db_file_name())) self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
'Test the functions in the instrumentdb module.' import os.path import unittest as ut import stripeline.instrumentdb as idb class TestInstrumentDb(ut.TestCase): def test_paths(self): self.assertTrue(os.path.exists(idb.instrument_db_path()), 'Path "{0}" not found'.format(idb.instrument_db_path())) for file_name in (idb.focal_plane_db_file_name(), idb.detector_db_file_name(), idb.scanning_strategy_db_file_name()): self.assertTrue(os.path.exists(file_name), 'File "{0}" not found'.format(file_name))
Print more helpful messages when tests fail
Print more helpful messages when tests fail
Python
mit
ziotom78/stripeline,ziotom78/stripeline
'Test the functions in the instrumentdb module.' import os.path import unittest as ut import stripeline.instrumentdb as idb class TestInstrumentDb(ut.TestCase): def test_paths(self): - self.assertTrue(os.path.exists(idb.instrument_db_path())) + self.assertTrue(os.path.exists(idb.instrument_db_path()), + 'Path "{0}" not found'.format(idb.instrument_db_path())) - self.assertTrue(os.path.exists(idb.focal_plane_db_file_name())) + for file_name in (idb.focal_plane_db_file_name(), + idb.detector_db_file_name(), + idb.scanning_strategy_db_file_name()): - self.assertTrue(os.path.exists(idb.detector_db_file_name())) + self.assertTrue(os.path.exists(file_name), - self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name())) + 'File "{0}" not found'.format(file_name))
Print more helpful messages when tests fail
## Code Before: 'Test the functions in the instrumentdb module.' import os.path import unittest as ut import stripeline.instrumentdb as idb class TestInstrumentDb(ut.TestCase): def test_paths(self): self.assertTrue(os.path.exists(idb.instrument_db_path())) self.assertTrue(os.path.exists(idb.focal_plane_db_file_name())) self.assertTrue(os.path.exists(idb.detector_db_file_name())) self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name())) ## Instruction: Print more helpful messages when tests fail ## Code After: 'Test the functions in the instrumentdb module.' import os.path import unittest as ut import stripeline.instrumentdb as idb class TestInstrumentDb(ut.TestCase): def test_paths(self): self.assertTrue(os.path.exists(idb.instrument_db_path()), 'Path "{0}" not found'.format(idb.instrument_db_path())) for file_name in (idb.focal_plane_db_file_name(), idb.detector_db_file_name(), idb.scanning_strategy_db_file_name()): self.assertTrue(os.path.exists(file_name), 'File "{0}" not found'.format(file_name))
// ... existing code ... def test_paths(self): self.assertTrue(os.path.exists(idb.instrument_db_path()), 'Path "{0}" not found'.format(idb.instrument_db_path())) for file_name in (idb.focal_plane_db_file_name(), idb.detector_db_file_name(), idb.scanning_strategy_db_file_name()): self.assertTrue(os.path.exists(file_name), 'File "{0}" not found'.format(file_name)) // ... rest of the code ...
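For reference, the `msg` argument added above changes what unittest prints on failure: the custom text is appended after the default assertion message. A tiny runnable illustration (the file name is a dummy):

import unittest

class Demo(unittest.TestCase):
    def test_missing_file(self):
        # Fails on purpose; the report reads roughly:
        #   AssertionError: False is not true : File "foo.fits" not found
        self.assertTrue(False, 'File "foo.fits" not found')

if __name__ == '__main__':
    unittest.main()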
a797f4862ccfdb84ff87f0f64a6abdc405823215
tests/app/na_celery/test_email_tasks.py
tests/app/na_celery/test_email_tasks.py
from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email') send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == '[email protected]' assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200) send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == sample_member.email assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
Update email task test for members
Update email task test for members
Python
mit
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: - def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email): + def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member): - mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email') + mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200) send_emails(sample_email.id) - assert mock_send_email.call_args[0][0] == '[email protected]' + assert mock_send_email.call_args[0][0] == sample_member.email assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
Update email task test for members
## Code Before: from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email') send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == '[email protected]' assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass ## Instruction: Update email task test for members ## Code After: from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200) send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == sample_member.email assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
... def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200) send_emails(sample_email.id) ... assert mock_send_email.call_args[0][0] == sample_member.email assert mock_send_email.call_args[0][1] == 'workshop: test title' ...
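The same patching pattern shown standalone with `unittest.mock`, which `mocker.patch` wraps: `return_value` fixes what the stub returns, and `call_args[0]` exposes the positional arguments of the last call. All names below are illustrative, and the '__main__' patch target assumes the snippet runs as a script:

from unittest import mock

def send_email(to, subject):
    raise RuntimeError('real network call')  # never reached while patched

def send_emails(members):
    for member in members:
        send_email(member, 'workshop: test title')

with mock.patch('__main__.send_email', return_value=200) as fake:
    send_emails(['ann@example.org'])

assert fake.call_args[0][0] == 'ann@example.org'
assert fake.call_args[0][1] == 'workshop: test title'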
49ea86d93d75afb1c3a3f95dd72a78b6d78f04cc
sitecustomize.py
sitecustomize.py
import sys import os from combinator.branchmgr import theBranchManager theBranchManager.addPaths() for key in sys.modules.keys(): # Unload all Combinator modules that had to be loaded in order to call # addPaths(). Although the very very beginning of this script needs to # load the trunk combinator (or whichever one your shell points at), once # the path has been set up, newer versions of combinator may be used; for # example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all # import Combinator from the current Divmod branch. This is especially # required so that Combinator's tests can be run on the currently-active # Combinator rather than the one responsible for setting up the # environment. if key == 'combinator' or key.startswith('combinator'): del sys.modules[key] # Install stuff as a user, by default. if sys.platform != 'darwin': # For use with setup.py... if sys.platform.startswith('win'): execprefix = os.path.abspath(os.path.expanduser("~/Python")) else: # Don't exactly know how Darwin fits in here - I think distutils is # buggy...? execprefix = os.path.abspath(os.path.expanduser("~/.local")) import sys class DistSysProxy: def __getattr__(self, attr): if attr in ('prefix', 'exec_prefix'): return execprefix else: return getattr(sys, attr) sys.modules['distutils.command.sys'] = DistSysProxy()
import sys import os from combinator.branchmgr import theBranchManager theBranchManager.addPaths() for key in sys.modules.keys(): # Unload all Combinator modules that had to be loaded in order to call # addPaths(). Although the very very beginning of this script needs to # load the trunk combinator (or whichever one your shell points at), once # the path has been set up, newer versions of combinator may be used; for # example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all # import Combinator from the current Divmod branch. This is especially # required so that Combinator's tests can be run on the currently-active # Combinator rather than the one responsible for setting up the # environment. if key == 'combinator' or key.startswith('combinator'): del sys.modules[key]
Remove distutils-mangling code from Combinator which breaks setuptools.
Remove distutils-mangling code from Combinator which breaks setuptools. After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`. Author: glyph Reviewer: dried Fixes #493
Python
mit
habnabit/Combinator,habnabit/Combinator
import sys import os from combinator.branchmgr import theBranchManager theBranchManager.addPaths() for key in sys.modules.keys(): # Unload all Combinator modules that had to be loaded in order to call # addPaths(). Although the very very beginning of this script needs to # load the trunk combinator (or whichever one your shell points at), once # the path has been set up, newer versions of combinator may be used; for # example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all # import Combinator from the current Divmod branch. This is especially # required so that Combinator's tests can be run on the currently-active # Combinator rather than the one responsible for setting up the # environment. if key == 'combinator' or key.startswith('combinator'): del sys.modules[key] - # Install stuff as a user, by default. - if sys.platform != 'darwin': - # For use with setup.py... - - if sys.platform.startswith('win'): - execprefix = os.path.abspath(os.path.expanduser("~/Python")) - else: - # Don't exactly know how Darwin fits in here - I think distutils is - # buggy...? - execprefix = os.path.abspath(os.path.expanduser("~/.local")) - - import sys - - class DistSysProxy: - def __getattr__(self, attr): - if attr in ('prefix', 'exec_prefix'): - return execprefix - else: - return getattr(sys, attr) - - sys.modules['distutils.command.sys'] = DistSysProxy() -
Remove distutils-mangling code from Combinator which breaks setuptools.
## Code Before: import sys import os from combinator.branchmgr import theBranchManager theBranchManager.addPaths() for key in sys.modules.keys(): # Unload all Combinator modules that had to be loaded in order to call # addPaths(). Although the very very beginning of this script needs to # load the trunk combinator (or whichever one your shell points at), once # the path has been set up, newer versions of combinator may be used; for # example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all # import Combinator from the current Divmod branch. This is especially # required so that Combinator's tests can be run on the currently-active # Combinator rather than the one responsible for setting up the # environment. if key == 'combinator' or key.startswith('combinator'): del sys.modules[key] # Install stuff as a user, by default. if sys.platform != 'darwin': # For use with setup.py... if sys.platform.startswith('win'): execprefix = os.path.abspath(os.path.expanduser("~/Python")) else: # Don't exactly know how Darwin fits in here - I think distutils is # buggy...? execprefix = os.path.abspath(os.path.expanduser("~/.local")) import sys class DistSysProxy: def __getattr__(self, attr): if attr in ('prefix', 'exec_prefix'): return execprefix else: return getattr(sys, attr) sys.modules['distutils.command.sys'] = DistSysProxy() ## Instruction: Remove distutils-mangling code from Combinator which breaks setuptools. ## Code After: import sys import os from combinator.branchmgr import theBranchManager theBranchManager.addPaths() for key in sys.modules.keys(): # Unload all Combinator modules that had to be loaded in order to call # addPaths(). Although the very very beginning of this script needs to # load the trunk combinator (or whichever one your shell points at), once # the path has been set up, newer versions of combinator may be used; for # example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all # import Combinator from the current Divmod branch. This is especially # required so that Combinator's tests can be run on the currently-active # Combinator rather than the one responsible for setting up the # environment. if key == 'combinator' or key.startswith('combinator'): del sys.modules[key]
... ...
936302bf5db057a01644014aabc1357f925c6afa
mezzanine/accounts/models.py
mezzanine/accounts/models.py
from django.db import DatabaseError, connection from django.db.models.signals import post_save from mezzanine.accounts import get_profile_for_user from mezzanine.conf import settings __all__ = () if getattr(settings, "AUTH_PROFILE_MODULE", None): def create_profile(user_model, instance, created, **kwargs): if created: try: get_profile_for_user(instance) except DatabaseError: # User creation in initial syncdb may have been triggered, # while profile model is under migration management and # doesn't exist yet. We close the connection so that it # gets re-opened, allowing syncdb to continue and complete. connection.close() post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
from django.db import DatabaseError, connection from django.db.models.signals import post_save from mezzanine.accounts import get_profile_for_user from mezzanine.conf import settings __all__ = () if getattr(settings, "AUTH_PROFILE_MODULE", None): def create_profile(**kwargs): if kwargs["created"]: try: get_profile_for_user(kwargs["instance"]) except DatabaseError: # User creation in initial syncdb may have been triggered, # while profile model is under migration management and # doesn't exist yet. We close the connection so that it # gets re-opened, allowing syncdb to continue and complete. connection.close() post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
Fix user profile signal handler.
Fix user profile signal handler.
Python
bsd-2-clause
wbtuomela/mezzanine,Cicero-Zhao/mezzanine,gradel/mezzanine,christianwgd/mezzanine,eino-makitalo/mezzanine,frankier/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,frankier/mezzanine,readevalprint/mezzanine,dsanders11/mezzanine,stephenmcd/mezzanine,frankier/mezzanine,eino-makitalo/mezzanine,ryneeverett/mezzanine,viaregio/mezzanine,wbtuomela/mezzanine,readevalprint/mezzanine,dsanders11/mezzanine,douglaskastle/mezzanine,ryneeverett/mezzanine,vladir/mezzanine,stephenmcd/mezzanine,douglaskastle/mezzanine,vladir/mezzanine,christianwgd/mezzanine,sjdines/mezzanine,sjuxax/mezzanine,jerivas/mezzanine,stephenmcd/mezzanine,sjdines/mezzanine,readevalprint/mezzanine,viaregio/mezzanine,gradel/mezzanine,eino-makitalo/mezzanine,gradel/mezzanine,spookylukey/mezzanine,ryneeverett/mezzanine,viaregio/mezzanine,sjuxax/mezzanine,vladir/mezzanine,molokov/mezzanine,dsanders11/mezzanine,jerivas/mezzanine,spookylukey/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,christianwgd/mezzanine,molokov/mezzanine,sjdines/mezzanine,molokov/mezzanine,spookylukey/mezzanine,wbtuomela/mezzanine
from django.db import DatabaseError, connection from django.db.models.signals import post_save from mezzanine.accounts import get_profile_for_user from mezzanine.conf import settings __all__ = () if getattr(settings, "AUTH_PROFILE_MODULE", None): - def create_profile(user_model, instance, created, **kwargs): + def create_profile(**kwargs): - if created: + if kwargs["created"]: try: - get_profile_for_user(instance) + get_profile_for_user(kwargs["instance"]) except DatabaseError: # User creation in initial syncdb may have been triggered, # while profile model is under migration management and # doesn't exist yet. We close the connection so that it # gets re-opened, allowing syncdb to continue and complete. connection.close() post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
Fix user profile signal handler.
## Code Before: from django.db import DatabaseError, connection from django.db.models.signals import post_save from mezzanine.accounts import get_profile_for_user from mezzanine.conf import settings __all__ = () if getattr(settings, "AUTH_PROFILE_MODULE", None): def create_profile(user_model, instance, created, **kwargs): if created: try: get_profile_for_user(instance) except DatabaseError: # User creation in initial syncdb may have been triggered, # while profile model is under migration management and # doesn't exist yet. We close the connection so that it # gets re-opened, allowing syncdb to continue and complete. connection.close() post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False) ## Instruction: Fix user profile signal handler. ## Code After: from django.db import DatabaseError, connection from django.db.models.signals import post_save from mezzanine.accounts import get_profile_for_user from mezzanine.conf import settings __all__ = () if getattr(settings, "AUTH_PROFILE_MODULE", None): def create_profile(**kwargs): if kwargs["created"]: try: get_profile_for_user(kwargs["instance"]) except DatabaseError: # User creation in initial syncdb may have been triggered, # while profile model is under migration management and # doesn't exist yet. We close the connection so that it # gets re-opened, allowing syncdb to continue and complete. connection.close() post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
# ... existing code ... def create_profile(**kwargs): if kwargs["created"]: try: get_profile_for_user(kwargs["instance"]) except DatabaseError: # ... rest of the code ...
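Why the `**kwargs`-only signature is the fix here: Django calls signal receivers with keyword arguments only (`signal`, `sender`, `instance`, `created`, ...), so the old positional parameter `user_model` was never bound and the handler raised a TypeError. A minimal sketch using a plain `django.dispatch.Signal` as a stand-in for `post_save` (assumes Django is installed, but needs no configured project):

from django.dispatch import Signal

post_save = Signal()  # stand-in for django.db.models.signals.post_save

def create_profile(**kwargs):
    if kwargs["created"]:
        print("creating profile for", kwargs["instance"])

post_save.connect(create_profile, weak=False)
post_save.send(sender=None, instance="alice", created=True)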
836fd354037a6aca6898b41a9d62ada31f1ee6ba
rasterio/tool.py
rasterio/tool.py
import code import collections import logging import sys try: import matplotlib.pyplot as plt except ImportError: plt = None import numpy import rasterio logger = logging.getLogger('rasterio') Stats = collections.namedtuple('Stats', ['min', 'max', 'mean']) # Collect dictionary of functions for use in the interpreter in main() funcs = locals() def show(source, cmap='gray'): """Show a raster using matplotlib. The raster may be either an ndarray or a (dataset, bidx) tuple. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source if plt is not None: plt.imshow(arr, cmap=cmap) plt.show() else: raise ImportError("matplotlib could not be imported") def stats(source): """Return a tuple with raster min, max, and mean. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr)) def main(banner, dataset): """ Main entry point for use with IPython interpreter """ import IPython locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) IPython.start_ipython(argv=[], user_ns=locals) return 0
import code import collections import logging import sys try: import matplotlib.pyplot as plt except ImportError: plt = None import numpy import rasterio logger = logging.getLogger('rasterio') Stats = collections.namedtuple('Stats', ['min', 'max', 'mean']) # Collect dictionary of functions for use in the interpreter in main() funcs = locals() def show(source, cmap='gray'): """Show a raster using matplotlib. The raster may be either an ndarray or a (dataset, bidx) tuple. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source if plt is not None: plt.imshow(arr, cmap=cmap) plt.show() else: raise ImportError("matplotlib could not be imported") def stats(source): """Return a tuple with raster min, max, and mean. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr)) def main(banner, dataset): """ Main entry point for use with IPython interpreter """ import IPython locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) IPython.InteractiveShell.banner1 = banner IPython.start_ipython(argv=[], user_ns=locals) return 0
Print the banner in IPython
Print the banner in IPython
Python
bsd-3-clause
clembou/rasterio,brendan-ward/rasterio,kapadia/rasterio,clembou/rasterio,njwilson23/rasterio,perrygeo/rasterio,brendan-ward/rasterio,perrygeo/rasterio,youngpm/rasterio,johanvdw/rasterio,clembou/rasterio,kapadia/rasterio,njwilson23/rasterio,perrygeo/rasterio,youngpm/rasterio,brendan-ward/rasterio,njwilson23/rasterio,johanvdw/rasterio,kapadia/rasterio,johanvdw/rasterio,youngpm/rasterio
import code import collections import logging import sys try: import matplotlib.pyplot as plt except ImportError: plt = None import numpy import rasterio logger = logging.getLogger('rasterio') Stats = collections.namedtuple('Stats', ['min', 'max', 'mean']) # Collect dictionary of functions for use in the interpreter in main() funcs = locals() def show(source, cmap='gray'): """Show a raster using matplotlib. The raster may be either an ndarray or a (dataset, bidx) tuple. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source if plt is not None: plt.imshow(arr, cmap=cmap) plt.show() else: raise ImportError("matplotlib could not be imported") def stats(source): """Return a tuple with raster min, max, and mean. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr)) def main(banner, dataset): """ Main entry point for use with IPython interpreter """ import IPython locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) + IPython.InteractiveShell.banner1 = banner IPython.start_ipython(argv=[], user_ns=locals) return 0
Print the banner in IPython
## Code Before: import code import collections import logging import sys try: import matplotlib.pyplot as plt except ImportError: plt = None import numpy import rasterio logger = logging.getLogger('rasterio') Stats = collections.namedtuple('Stats', ['min', 'max', 'mean']) # Collect dictionary of functions for use in the interpreter in main() funcs = locals() def show(source, cmap='gray'): """Show a raster using matplotlib. The raster may be either an ndarray or a (dataset, bidx) tuple. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source if plt is not None: plt.imshow(arr, cmap=cmap) plt.show() else: raise ImportError("matplotlib could not be imported") def stats(source): """Return a tuple with raster min, max, and mean. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr)) def main(banner, dataset): """ Main entry point for use with IPython interpreter """ import IPython locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) IPython.start_ipython(argv=[], user_ns=locals) return 0 ## Instruction: Print the banner in IPython ## Code After: import code import collections import logging import sys try: import matplotlib.pyplot as plt except ImportError: plt = None import numpy import rasterio logger = logging.getLogger('rasterio') Stats = collections.namedtuple('Stats', ['min', 'max', 'mean']) # Collect dictionary of functions for use in the interpreter in main() funcs = locals() def show(source, cmap='gray'): """Show a raster using matplotlib. The raster may be either an ndarray or a (dataset, bidx) tuple. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source if plt is not None: plt.imshow(arr, cmap=cmap) plt.show() else: raise ImportError("matplotlib could not be imported") def stats(source): """Return a tuple with raster min, max, and mean. """ if isinstance(source, tuple): arr = source[0].read(source[1]) else: arr = source return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr)) def main(banner, dataset): """ Main entry point for use with IPython interpreter """ import IPython locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) IPython.InteractiveShell.banner1 = banner IPython.start_ipython(argv=[], user_ns=locals) return 0
... locals = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt) IPython.InteractiveShell.banner1 = banner IPython.start_ipython(argv=[], user_ns=locals) ...
26598254cd48a716527eb4689ad96551c5a39790
ksp_login/__init__.py
ksp_login/__init__.py
__version__ = '0.6.0' __version_info__ = __version__.split('.') from django.utils.translation import ugettext_lazy as _ def __activate_social_auth_monkeypatch(): from social_core.backends.base import BaseAuth from social_core.backends.open_id import (OPENID_ID_FIELD, OpenIdAuth) from social_core.backends.livejournal import LiveJournalOpenId from social_core.backends.yahoo import YahooOpenId from social_core.backends.google import GoogleOpenId from social_core.backends.yandex import YandexOpenId BaseAuth.REQUIRED_FIELD_NAME = None BaseAuth.REQUIRED_FIELD_VERBOSE_NAME = None OpenIdAuth.REQUIRED_FIELD_NAME = OPENID_ID_FIELD OpenIdAuth.REQUIRED_FIELD_VERBOSE_NAME = _('OpenID identity') LiveJournalOpenId.REQUIRED_FIELD_NAME = 'openid_lj_user' LiveJournalOpenId.REQUIRED_FIELD_VERBOSE_NAME = _('LiveJournal username') # Reset to None in those OpenID backends where nothing is required. GoogleOpenId.REQUIRED_FIELD_NAME = None GoogleOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YahooOpenId.REQUIRED_FIELD_NAME = None YahooOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YandexOpenId.REQUIRED_FIELD_NAME = None YandexOpenId.REQUIRED_FIELD_VERBOSE_NAME = None __activate_social_auth_monkeypatch()
__version__ = '0.6.0' __version_info__ = tuple(map(int, __version__.split('.'))) from django.utils.translation import ugettext_lazy as _ def __activate_social_auth_monkeypatch(): from social_core.backends.base import BaseAuth from social_core.backends.open_id import (OPENID_ID_FIELD, OpenIdAuth) from social_core.backends.livejournal import LiveJournalOpenId from social_core.backends.yahoo import YahooOpenId from social_core.backends.google import GoogleOpenId from social_core.backends.yandex import YandexOpenId BaseAuth.REQUIRED_FIELD_NAME = None BaseAuth.REQUIRED_FIELD_VERBOSE_NAME = None OpenIdAuth.REQUIRED_FIELD_NAME = OPENID_ID_FIELD OpenIdAuth.REQUIRED_FIELD_VERBOSE_NAME = _('OpenID identity') LiveJournalOpenId.REQUIRED_FIELD_NAME = 'openid_lj_user' LiveJournalOpenId.REQUIRED_FIELD_VERBOSE_NAME = _('LiveJournal username') # Reset to None in those OpenID backends where nothing is required. GoogleOpenId.REQUIRED_FIELD_NAME = None GoogleOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YahooOpenId.REQUIRED_FIELD_NAME = None YahooOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YandexOpenId.REQUIRED_FIELD_NAME = None YandexOpenId.REQUIRED_FIELD_VERBOSE_NAME = None __activate_social_auth_monkeypatch()
Make version info tuple of ints.
Make version info tuple of ints.
Python
bsd-3-clause
koniiiik/ksp_login,koniiiik/ksp_login,koniiiik/ksp_login
__version__ = '0.6.0' - __version_info__ = __version__.split('.') + __version_info__ = tuple(map(int, __version__.split('.'))) from django.utils.translation import ugettext_lazy as _ def __activate_social_auth_monkeypatch(): from social_core.backends.base import BaseAuth from social_core.backends.open_id import (OPENID_ID_FIELD, OpenIdAuth) from social_core.backends.livejournal import LiveJournalOpenId from social_core.backends.yahoo import YahooOpenId from social_core.backends.google import GoogleOpenId from social_core.backends.yandex import YandexOpenId BaseAuth.REQUIRED_FIELD_NAME = None BaseAuth.REQUIRED_FIELD_VERBOSE_NAME = None OpenIdAuth.REQUIRED_FIELD_NAME = OPENID_ID_FIELD OpenIdAuth.REQUIRED_FIELD_VERBOSE_NAME = _('OpenID identity') LiveJournalOpenId.REQUIRED_FIELD_NAME = 'openid_lj_user' LiveJournalOpenId.REQUIRED_FIELD_VERBOSE_NAME = _('LiveJournal username') # Reset to None in those OpenID backends where nothing is required. GoogleOpenId.REQUIRED_FIELD_NAME = None GoogleOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YahooOpenId.REQUIRED_FIELD_NAME = None YahooOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YandexOpenId.REQUIRED_FIELD_NAME = None YandexOpenId.REQUIRED_FIELD_VERBOSE_NAME = None + __activate_social_auth_monkeypatch()
Make version info tuple of ints.
## Code Before: __version__ = '0.6.0' __version_info__ = __version__.split('.') from django.utils.translation import ugettext_lazy as _ def __activate_social_auth_monkeypatch(): from social_core.backends.base import BaseAuth from social_core.backends.open_id import (OPENID_ID_FIELD, OpenIdAuth) from social_core.backends.livejournal import LiveJournalOpenId from social_core.backends.yahoo import YahooOpenId from social_core.backends.google import GoogleOpenId from social_core.backends.yandex import YandexOpenId BaseAuth.REQUIRED_FIELD_NAME = None BaseAuth.REQUIRED_FIELD_VERBOSE_NAME = None OpenIdAuth.REQUIRED_FIELD_NAME = OPENID_ID_FIELD OpenIdAuth.REQUIRED_FIELD_VERBOSE_NAME = _('OpenID identity') LiveJournalOpenId.REQUIRED_FIELD_NAME = 'openid_lj_user' LiveJournalOpenId.REQUIRED_FIELD_VERBOSE_NAME = _('LiveJournal username') # Reset to None in those OpenID backends where nothing is required. GoogleOpenId.REQUIRED_FIELD_NAME = None GoogleOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YahooOpenId.REQUIRED_FIELD_NAME = None YahooOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YandexOpenId.REQUIRED_FIELD_NAME = None YandexOpenId.REQUIRED_FIELD_VERBOSE_NAME = None __activate_social_auth_monkeypatch() ## Instruction: Make version info tuple of ints. ## Code After: __version__ = '0.6.0' __version_info__ = tuple(map(int, __version__.split('.'))) from django.utils.translation import ugettext_lazy as _ def __activate_social_auth_monkeypatch(): from social_core.backends.base import BaseAuth from social_core.backends.open_id import (OPENID_ID_FIELD, OpenIdAuth) from social_core.backends.livejournal import LiveJournalOpenId from social_core.backends.yahoo import YahooOpenId from social_core.backends.google import GoogleOpenId from social_core.backends.yandex import YandexOpenId BaseAuth.REQUIRED_FIELD_NAME = None BaseAuth.REQUIRED_FIELD_VERBOSE_NAME = None OpenIdAuth.REQUIRED_FIELD_NAME = OPENID_ID_FIELD OpenIdAuth.REQUIRED_FIELD_VERBOSE_NAME = _('OpenID identity') LiveJournalOpenId.REQUIRED_FIELD_NAME = 'openid_lj_user' LiveJournalOpenId.REQUIRED_FIELD_VERBOSE_NAME = _('LiveJournal username') # Reset to None in those OpenID backends where nothing is required. GoogleOpenId.REQUIRED_FIELD_NAME = None GoogleOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YahooOpenId.REQUIRED_FIELD_NAME = None YahooOpenId.REQUIRED_FIELD_VERBOSE_NAME = None YandexOpenId.REQUIRED_FIELD_NAME = None YandexOpenId.REQUIRED_FIELD_VERBOSE_NAME = None __activate_social_auth_monkeypatch()
// ... existing code ... __version__ = '0.6.0' __version_info__ = tuple(map(int, __version__.split('.'))) // ... modified code ... __activate_social_auth_monkeypatch() // ... rest of the code ...
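The reason for converting to ints in this record: integer tuples compare numerically, while version strings compare character by character, so as a string '0.10.0' would sort before '0.6.0'. A quick check:

__version__ = '0.6.0'
__version_info__ = tuple(map(int, __version__.split('.')))

assert __version_info__ == (0, 6, 0)
assert (0, 10, 0) > (0, 6, 0)      # numeric comparison: correct ordering
assert not ('0.10.0' > '0.6.0')    # string comparison: '1' < '6', so it reads as older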
7d82f3accce0cf174fd7cf176d5c289ffc791647
ds_queue.py
ds_queue.py
from __future__ import absolute_import from __future__ import print_function from __future__ import division class Queue(object): """Queue class.""" def __init__(self): self.items = [] def is_empty(self): return self.items == [] def enqueue(self, item): self.items.insert(0, item) def dequeue(self): return self.items.pop() def size(self): return len(self.items) def show(self): return self.items def main(): queue = Queue() print('Is empty: {}'.format(queue.is_empty())) print('Enqueue "dog", 4 & 8.4') queue.enqueue('dog') queue.enqueue(4) queue.enqueue(8.4) print('Is empty: {}'.format(queue.is_empty())) print('Queue size: {}'.format(queue.size())) print('Dequeue: {}'.format(queue.dequeue())) print('Is empty: {}'.format(queue.is_empty())) print('Queue size: {}'.format(queue.size())) print('Show: {}'.format(queue.show())) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division class Queue(object): """Queue class.""" def __init__(self): self.items = [] def is_empty(self): return self.items == [] def peek(self): return self.items[-1] def enqueue(self, item): self.items.insert(0, item) def dequeue(self): return self.items.pop() def size(self): return len(self.items) def show(self): return self.items def main(): q = Queue() print('Is empty: {}'.format(q.is_empty())) print('Enqueue "dog", 4 & 8.4') q.enqueue('dog') q.enqueue(4) q.enqueue(8.4) print(q.peek()) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Dequeue: {}'.format(q.dequeue())) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Show: {}'.format(q.show())) if __name__ == '__main__': main()
Revise Queue instance to q
Revise Queue instance to q
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import absolute_import from __future__ import print_function from __future__ import division class Queue(object): """Queue class.""" def __init__(self): self.items = [] def is_empty(self): return self.items == [] + + def peek(self): + return self.items[-1] def enqueue(self, item): self.items.insert(0, item) def dequeue(self): return self.items.pop() def size(self): return len(self.items) def show(self): return self.items def main(): - queue = Queue() + q = Queue() - print('Is empty: {}'.format(queue.is_empty())) + print('Is empty: {}'.format(q.is_empty())) print('Enqueue "dog", 4 & 8.4') - queue.enqueue('dog') + q.enqueue('dog') - queue.enqueue(4) + q.enqueue(4) - queue.enqueue(8.4) + q.enqueue(8.4) + print(q.peek()) - print('Is empty: {}'.format(queue.is_empty())) + print('Is empty: {}'.format(q.is_empty())) - print('Queue size: {}'.format(queue.size())) + print('Queue size: {}'.format(q.size())) - print('Dequeue: {}'.format(queue.dequeue())) + print('Dequeue: {}'.format(q.dequeue())) - print('Is empty: {}'.format(queue.is_empty())) + print('Is empty: {}'.format(q.is_empty())) - print('Queue size: {}'.format(queue.size())) + print('Queue size: {}'.format(q.size())) - print('Show: {}'.format(queue.show())) + print('Show: {}'.format(q.show())) + if __name__ == '__main__': main()
Revise Queue instance to q
## Code Before: from __future__ import absolute_import from __future__ import print_function from __future__ import division class Queue(object): """Queue class.""" def __init__(self): self.items = [] def is_empty(self): return self.items == [] def enqueue(self, item): self.items.insert(0, item) def dequeue(self): return self.items.pop() def size(self): return len(self.items) def show(self): return self.items def main(): queue = Queue() print('Is empty: {}'.format(queue.is_empty())) print('Enqueue "dog", 4 & 8.4') queue.enqueue('dog') queue.enqueue(4) queue.enqueue(8.4) print('Is empty: {}'.format(queue.is_empty())) print('Queue size: {}'.format(queue.size())) print('Dequeue: {}'.format(queue.dequeue())) print('Is empty: {}'.format(queue.is_empty())) print('Queue size: {}'.format(queue.size())) print('Show: {}'.format(queue.show())) if __name__ == '__main__': main() ## Instruction: Revise Queue instance to q ## Code After: from __future__ import absolute_import from __future__ import print_function from __future__ import division class Queue(object): """Queue class.""" def __init__(self): self.items = [] def is_empty(self): return self.items == [] def peek(self): return self.items[-1] def enqueue(self, item): self.items.insert(0, item) def dequeue(self): return self.items.pop() def size(self): return len(self.items) def show(self): return self.items def main(): q = Queue() print('Is empty: {}'.format(q.is_empty())) print('Enqueue "dog", 4 & 8.4') q.enqueue('dog') q.enqueue(4) q.enqueue(8.4) print(q.peek()) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Dequeue: {}'.format(q.dequeue())) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Show: {}'.format(q.show())) if __name__ == '__main__': main()
# ... existing code ... return self.items == [] def peek(self): return self.items[-1] # ... modified code ... def main(): q = Queue() print('Is empty: {}'.format(q.is_empty())) ... print('Enqueue "dog", 4 & 8.4') q.enqueue('dog') q.enqueue(4) q.enqueue(8.4) print(q.peek()) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Dequeue: {}'.format(q.dequeue())) print('Is empty: {}'.format(q.is_empty())) print('Queue size: {}'.format(q.size())) print('Show: {}'.format(q.show())) # ... rest of the code ...
072774a36c82c3654cdabc6ebfd677b8603db49f
src/models/image.py
src/models/image.py
from utils.utils import limit_file_name class Image(): _file_name_pattern = "reddit_%s_%s_album_%s_%s_%s" def __init__(self, url, post, image_file): self.post_id = post.id self.url = url self.sub_display_name = post.subreddit.display_name self.image_file = limit_file_name(image_file) self.domain = post.domain if "/a/" in post.url: self.album_id = post.url[post.url.index("/a/") + 3:] elif "/gallery/" in post.url: self.album_id = post.url[post.url.index("/gallery/") + 9:] else: self.album_id = None self.local_file_name = self._file_name_pattern % ( self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file)
import datetime from utils.utils import limit_file_name class Image(): _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s" def __init__(self, url, post, image_file): self.post_id = post.id self.url = url self.sub_display_name = post.subreddit.display_name self.image_file = limit_file_name(image_file) self.domain = post.domain self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d") if "/a/" in post.url: self.album_id = post.url[post.url.index("/a/") + 3:] elif "/gallery/" in post.url: self.album_id = post.url[post.url.index("/gallery/") + 9:] else: self.album_id = None self.local_file_name = self._file_name_pattern % ( self.created, self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file)
Add a timestamp to the filename to allow for chronological ordering in the filesystem
Add a timestamp to the filename to allow for chronological ordering in the filesystem
Python
apache-2.0
CharlieCorner/pymage_downloader
+ import datetime from utils.utils import limit_file_name class Image(): - _file_name_pattern = "reddit_%s_%s_album_%s_%s_%s" + _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s" def __init__(self, url, post, image_file): self.post_id = post.id self.url = url self.sub_display_name = post.subreddit.display_name self.image_file = limit_file_name(image_file) self.domain = post.domain + self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d") if "/a/" in post.url: self.album_id = post.url[post.url.index("/a/") + 3:] elif "/gallery/" in post.url: self.album_id = post.url[post.url.index("/gallery/") + 9:] else: self.album_id = None self.local_file_name = self._file_name_pattern % ( - self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file) + self.created, self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file)
Add a timestamp to the filename to allow for chronological ordering in the filesystem
## Code Before: from utils.utils import limit_file_name class Image(): _file_name_pattern = "reddit_%s_%s_album_%s_%s_%s" def __init__(self, url, post, image_file): self.post_id = post.id self.url = url self.sub_display_name = post.subreddit.display_name self.image_file = limit_file_name(image_file) self.domain = post.domain if "/a/" in post.url: self.album_id = post.url[post.url.index("/a/") + 3:] elif "/gallery/" in post.url: self.album_id = post.url[post.url.index("/gallery/") + 9:] else: self.album_id = None self.local_file_name = self._file_name_pattern % ( self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file) ## Instruction: Add a timestamp to the filename to allow for chronological ordering in the filesystem ## Code After: import datetime from utils.utils import limit_file_name class Image(): _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s" def __init__(self, url, post, image_file): self.post_id = post.id self.url = url self.sub_display_name = post.subreddit.display_name self.image_file = limit_file_name(image_file) self.domain = post.domain self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d") if "/a/" in post.url: self.album_id = post.url[post.url.index("/a/") + 3:] elif "/gallery/" in post.url: self.album_id = post.url[post.url.index("/gallery/") + 9:] else: self.album_id = None self.local_file_name = self._file_name_pattern % ( self.created, self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file)
# ... existing code ... import datetime from utils.utils import limit_file_name # ... modified code ... class Image(): _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s" ... self.domain = post.domain self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d") ... self.local_file_name = self._file_name_pattern % ( self.created, self.sub_display_name, self.post_id, self.album_id, self.domain, self.image_file) # ... rest of the code ...
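How the prefix added in this record yields chronological ordering: `post.created` is a Unix timestamp, and its zero-padded `%y%m%d` rendering sorts lexicographically in date order, so a plain name sort on the filesystem lines the files up by post date. A minimal check (timestamps invented; `fromtimestamp` uses the local timezone, so the rendered day can shift, but the relative order is preserved):

import datetime

def stamp(ts):
    return datetime.datetime.fromtimestamp(ts).strftime("%y%m%d")

names = sorted("reddit_%s_pics_post.jpg" % stamp(ts)
               for ts in (1438387200, 1420070400))  # 2015-08-01, 2015-01-01 UTC
print(names)  # the earlier (January) post sorts first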
95a72d1f06c740b933983c2446b36bb450c4730e
ona_migration_script/migrate_toilet_codes.py
ona_migration_script/migrate_toilet_codes.py
import argparse parser = argparse.ArgumentParser(description='Migrate toilet codes') parser.add_argument( 'url', type=str, help='The base URL for the django toilet database') parser.add_argument( 'username', type=str, help='The username used to log in') parser.add_argument( 'password', type=str, help='The password used to log in')
import argparse parser = argparse.ArgumentParser(description='Migrate toilet codes') parser.add_argument( 'url', type=str, help='The base URL for the django toilet database') parser.add_argument( 'username', type=str, help='The username used to log in') parser.add_argument( 'password', type=str, help='The password used to log in') parser.add_argument( '--dryrun', '-d', action='store_true', help='Print out changes instead of uploading them.') args = parser.parse_args()
Add dryrun command line argument
Add dryrun command line argument
Python
bsd-3-clause
praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js
import argparse parser = argparse.ArgumentParser(description='Migrate toilet codes') parser.add_argument( 'url', type=str, help='The base URL for the django toilet database') parser.add_argument( 'username', type=str, help='The username used to log in') parser.add_argument( 'password', type=str, help='The password used to log in') + parser.add_argument( + '--dryrun', '-d', action='store_true', + help='Print out changes instead of uploading them.') + args = parser.parse_args()
Add dryrun command line argument
## Code Before: import argparse parser = argparse.ArgumentParser(description='Migrate toilet codes') parser.add_argument( 'url', type=str, help='The base URL for the django toilet database') parser.add_argument( 'username', type=str, help='The username used to log in') parser.add_argument( 'password', type=str, help='The password used to log in') ## Instruction: Add dryrun command line argument ## Code After: import argparse parser = argparse.ArgumentParser(description='Migrate toilet codes') parser.add_argument( 'url', type=str, help='The base URL for the django toilet database') parser.add_argument( 'username', type=str, help='The username used to log in') parser.add_argument( 'password', type=str, help='The password used to log in') parser.add_argument( '--dryrun', '-d', action='store_true', help='Print out changes instead of uploading them.') args = parser.parse_args()
# ... existing code ...
    'password', type=str,
    help='The password used to log in')
parser.add_argument(
    '--dryrun', '-d', action='store_true',
    help='Print out changes instead of uploading them.')
args = parser.parse_args()
# ... rest of the code ...
4d3f809ba5e1b5109e6f2e73d9c9630371660210
Bookie/bookie/lib/access.py
Bookie/bookie/lib/access.py
"""Handle auth and authz activities in bookie""" from pyramid.httpexceptions import HTTPForbidden class Authorize(object): """Context manager to check if the user is authorized use: with Authorize(some_key): # do work Will return NotAuthorized if it fails """ def __init__(self, submitted_key, config_key): """Create the context manager""" self.api_key = config_key self.check_key = submitted_key def __enter__(self): """Verify api key set in constructor""" if self.api_key != self.check_key: raise HTTPForbidden('Invalid Authorization') def __exit__(self, exc_type, exc_value, traceback): """No cleanup work to do after usage""" pass
"""Handle auth and authz activities in bookie""" import logging from pyramid.httpexceptions import HTTPForbidden LOG = logging.getLogger(__name__) class Authorize(object): """Context manager to check if the user is authorized use: with Authorize(some_key): # do work Will return NotAuthorized if it fails """ def __init__(self, submitted_key, config_key): """Create the context manager""" self.api_key = config_key self.check_key = submitted_key def __enter__(self): """Verify api key set in constructor""" if self.api_key != self.check_key: LOG.error('Invalid API Key! {0} v {1}'.format(self.api_key, self.check_key)) raise HTTPForbidden('Invalid Authorization') def __exit__(self, exc_type, exc_value, traceback): """No cleanup work to do after usage""" pass
Update to make sure we log an error with an invalid key
Update to make sure we log an error with an invalid key
Python
agpl-3.0
GreenLunar/Bookie,adamlincoln/Bookie,wangjun/Bookie,bookieio/Bookie,GreenLunar/Bookie,teodesson/Bookie,adamlincoln/Bookie,pombredanne/Bookie,bookieio/Bookie,teodesson/Bookie,adamlincoln/Bookie,GreenLunar/Bookie,wangjun/Bookie,wangjun/Bookie,skmezanul/Bookie,wangjun/Bookie,GreenLunar/Bookie,skmezanul/Bookie,adamlincoln/Bookie,pombredanne/Bookie,skmezanul/Bookie,bookieio/Bookie,charany1/Bookie,teodesson/Bookie,skmezanul/Bookie,charany1/Bookie,pombredanne/Bookie,charany1/Bookie,teodesson/Bookie,bookieio/Bookie
"""Handle auth and authz activities in bookie""" + import logging from pyramid.httpexceptions import HTTPForbidden + + + LOG = logging.getLogger(__name__) class Authorize(object): """Context manager to check if the user is authorized use: with Authorize(some_key): # do work Will return NotAuthorized if it fails """ def __init__(self, submitted_key, config_key): """Create the context manager""" self.api_key = config_key self.check_key = submitted_key def __enter__(self): """Verify api key set in constructor""" if self.api_key != self.check_key: + LOG.error('Invalid API Key! {0} v {1}'.format(self.api_key, + self.check_key)) raise HTTPForbidden('Invalid Authorization') def __exit__(self, exc_type, exc_value, traceback): """No cleanup work to do after usage""" pass
Update to make sure we log an error with an invalid key
## Code Before:
"""Handle auth and authz activities in bookie"""
from pyramid.httpexceptions import HTTPForbidden


class Authorize(object):
    """Context manager to check if the user is authorized

    use:
        with Authorize(some_key):
            # do work

    Will return NotAuthorized if it fails
    """

    def __init__(self, submitted_key, config_key):
        """Create the context manager"""
        self.api_key = config_key
        self.check_key = submitted_key

    def __enter__(self):
        """Verify api key set in constructor"""
        if self.api_key != self.check_key:
            raise HTTPForbidden('Invalid Authorization')

    def __exit__(self, exc_type, exc_value, traceback):
        """No cleanup work to do after usage"""
        pass

## Instruction:
Update to make sure we log an error with an invalid key

## Code After:
"""Handle auth and authz activities in bookie"""
import logging
from pyramid.httpexceptions import HTTPForbidden


LOG = logging.getLogger(__name__)


class Authorize(object):
    """Context manager to check if the user is authorized

    use:
        with Authorize(some_key):
            # do work

    Will return NotAuthorized if it fails
    """

    def __init__(self, submitted_key, config_key):
        """Create the context manager"""
        self.api_key = config_key
        self.check_key = submitted_key

    def __enter__(self):
        """Verify api key set in constructor"""
        if self.api_key != self.check_key:
            LOG.error('Invalid API Key! {0} v {1}'.format(self.api_key,
                                                          self.check_key))
            raise HTTPForbidden('Invalid Authorization')

    def __exit__(self, exc_type, exc_value, traceback):
        """No cleanup work to do after usage"""
        pass
...
"""Handle auth and authz activities in bookie"""
import logging
from pyramid.httpexceptions import HTTPForbidden


LOG = logging.getLogger(__name__)
...
        if self.api_key != self.check_key:
            LOG.error('Invalid API Key! {0} v {1}'.format(self.api_key,
                                                          self.check_key))
            raise HTTPForbidden('Invalid Authorization')
...
335a33465e197c9a2e52ed9de90546e2ca6173ee
tests/test_websocket_subscriber.py
tests/test_websocket_subscriber.py
"""Tests for the WebSocketSubscriber handlers.""" import json import pytest from tornado.web import Application from tornado.websocket import websocket_connect from tornadose.handlers import WebSocketSubscriber import utilities @pytest.fixture def store(): return utilities.TestStore() @pytest.fixture def app(): return Application([ (r'/', WebSocketSubscriber, dict(store=store)) ]) @pytest.mark.gen_test def test_get_message(http_server, io_loop, base_url, store): conn = yield websocket_connect('ws' + base_url.split('http')[1]) store.submit('test') io_loop.call_later(0.01, store.publish) msg = yield conn.read_message() msg = json.loads(msg) assert msg['data'] == 'test' conn.close()
"""Tests for the WebSocketSubscriber handlers.""" import json from tornado.ioloop import IOLoop from tornado.web import Application from tornado.websocket import websocket_connect from tornado.testing import AsyncHTTPTestCase, gen_test from tornadose.handlers import WebSocketSubscriber import utilities class WebSocketSubscriberTestCase(AsyncHTTPTestCase): def setUp(self): self.store = utilities.TestStore() super(WebSocketSubscriberTestCase, self).setUp() def get_app(self): return Application([ (r'/', WebSocketSubscriber, dict(store=self.store)) ]) @gen_test def test_get_message(self): url = self.get_url('/').replace("http://", "ws://") conn = yield websocket_connect(url) self.store.submit('test') IOLoop.current().call_later(0.01, self.store.publish) msg = yield conn.read_message() msg = json.loads(msg) self.assertEqual(msg['data'], 'test') conn.close()
Fix test case for WebSocketSubscriber
Fix test case for WebSocketSubscriber

Switched to unittest-style testing (pytest is a bit too magical
especially with the pytest-tornado extension). I may change all tests
later to use unittest.
Python
mit
mivade/tornadose
"""Tests for the WebSocketSubscriber handlers.""" import json - import pytest + from tornado.ioloop import IOLoop from tornado.web import Application from tornado.websocket import websocket_connect + from tornado.testing import AsyncHTTPTestCase, gen_test from tornadose.handlers import WebSocketSubscriber import utilities - @pytest.fixture - def store(): + class WebSocketSubscriberTestCase(AsyncHTTPTestCase): + def setUp(self): - return utilities.TestStore() + self.store = utilities.TestStore() + super(WebSocketSubscriberTestCase, self).setUp() + def get_app(self): + return Application([ + (r'/', WebSocketSubscriber, dict(store=self.store)) + ]) - @pytest.fixture - def app(): - return Application([ - (r'/', WebSocketSubscriber, dict(store=store)) - ]) + @gen_test + def test_get_message(self): + url = self.get_url('/').replace("http://", "ws://") + conn = yield websocket_connect(url) + self.store.submit('test') + IOLoop.current().call_later(0.01, self.store.publish) + msg = yield conn.read_message() + msg = json.loads(msg) + self.assertEqual(msg['data'], 'test') + conn.close() - - @pytest.mark.gen_test - def test_get_message(http_server, io_loop, base_url, store): - conn = yield websocket_connect('ws' + base_url.split('http')[1]) - store.submit('test') - io_loop.call_later(0.01, store.publish) - msg = yield conn.read_message() - msg = json.loads(msg) - assert msg['data'] == 'test' - conn.close() -
Fix test case for WebSocketSubscriber
## Code Before:
"""Tests for the WebSocketSubscriber handlers."""
import json

import pytest
from tornado.web import Application
from tornado.websocket import websocket_connect

from tornadose.handlers import WebSocketSubscriber
import utilities


@pytest.fixture
def store():
    return utilities.TestStore()


@pytest.fixture
def app():
    return Application([
        (r'/', WebSocketSubscriber, dict(store=store))
    ])


@pytest.mark.gen_test
def test_get_message(http_server, io_loop, base_url, store):
    conn = yield websocket_connect('ws' + base_url.split('http')[1])
    store.submit('test')
    io_loop.call_later(0.01, store.publish)
    msg = yield conn.read_message()
    msg = json.loads(msg)
    assert msg['data'] == 'test'
    conn.close()

## Instruction:
Fix test case for WebSocketSubscriber

## Code After:
"""Tests for the WebSocketSubscriber handlers."""
import json

from tornado.ioloop import IOLoop
from tornado.web import Application
from tornado.websocket import websocket_connect
from tornado.testing import AsyncHTTPTestCase, gen_test

from tornadose.handlers import WebSocketSubscriber
import utilities


class WebSocketSubscriberTestCase(AsyncHTTPTestCase):
    def setUp(self):
        self.store = utilities.TestStore()
        super(WebSocketSubscriberTestCase, self).setUp()

    def get_app(self):
        return Application([
            (r'/', WebSocketSubscriber, dict(store=self.store))
        ])

    @gen_test
    def test_get_message(self):
        url = self.get_url('/').replace("http://", "ws://")
        conn = yield websocket_connect(url)
        self.store.submit('test')
        IOLoop.current().call_later(0.01, self.store.publish)
        msg = yield conn.read_message()
        msg = json.loads(msg)
        self.assertEqual(msg['data'], 'test')
        conn.close()
# ... existing code ...
import json

from tornado.ioloop import IOLoop
from tornado.web import Application
# ... modified code ...
from tornado.websocket import websocket_connect
from tornado.testing import AsyncHTTPTestCase, gen_test
...
class WebSocketSubscriberTestCase(AsyncHTTPTestCase):
    def setUp(self):
        self.store = utilities.TestStore()
        super(WebSocketSubscriberTestCase, self).setUp()

    def get_app(self):
        return Application([
            (r'/', WebSocketSubscriber, dict(store=self.store))
        ])

    @gen_test
    def test_get_message(self):
        url = self.get_url('/').replace("http://", "ws://")
        conn = yield websocket_connect(url)
        self.store.submit('test')
        IOLoop.current().call_later(0.01, self.store.publish)
        msg = yield conn.read_message()
        msg = json.loads(msg)
        self.assertEqual(msg['data'], 'test')
        conn.close()
# ... rest of the code ...
45ee26fae4a8d31b66e3307c0ab4aed21678b4b6
scrubadub/filth/named_entity.py
scrubadub/filth/named_entity.py
from .base import Filth


class NamedEntityFilth(Filth):
    """
    Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
    """
    type = 'named_entity'

    def __init__(self, *args, label: str, **kwargs):
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
        self.type = "{}_{}".format(self.type, label).lower()
from .base import Filth


class NamedEntityFilth(Filth):
    """
    Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
    """
    type = 'named_entity'

    def __init__(self, *args, label: str, **kwargs):
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
        self.label = label.lower()
Revert NamedEntityFilth name because it was a bad idea
Revert NamedEntityFilth name because it was a bad idea
Python
mit
deanmalmgren/scrubadub,datascopeanalytics/scrubadub,deanmalmgren/scrubadub,datascopeanalytics/scrubadub
from .base import Filth


class NamedEntityFilth(Filth):
    """
    Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
    """
    type = 'named_entity'

    def __init__(self, *args, label: str, **kwargs):
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
-         self.type = "{}_{}".format(self.type, label).lower()
+         self.label = label.lower()
Revert NamedEntityFilth name because it was a bad idea
## Code Before:
from .base import Filth


class NamedEntityFilth(Filth):
    """
    Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
    """
    type = 'named_entity'

    def __init__(self, *args, label: str, **kwargs):
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
        self.type = "{}_{}".format(self.type, label).lower()

## Instruction:
Revert NamedEntityFilth name because it was a bad idea

## Code After:
from .base import Filth


class NamedEntityFilth(Filth):
    """
    Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
    """
    type = 'named_entity'

    def __init__(self, *args, label: str, **kwargs):
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
        self.label = label.lower()
...
        super(NamedEntityFilth, self).__init__(*args, **kwargs)
        self.label = label.lower()
...
e697743b89f262a179881e2c58e2422a146248d0
db_cleanup.py
db_cleanup.py
import os, datetime


def clean_up():
    # Set Django settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")

    # import our blog stuff
    from StackSmash.apps.blog.models import Comment, Post

    # if comments are not listed and older than a week, delete them.
    Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()

    # comments = Comment.objects.filter(listed=False).order_by("created")

    # If comments are older than a day, delete them.
    # for comment in comments:
    #     if comment.created.day < datetime.datetime.now().day:
    #         Comment.objects.filter(listed=False, id=comment.id).delete()


if __name__ == "__main__":
    clean_up()
import os, datetime


def clean_up():
    # Set Django settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")

    # import our blog stuff
    from StackSmash.apps.blog.models import Comment, Post

    # if comments are not listed and older than a week, delete them.
    Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()


if __name__ == "__main__":
    clean_up()
Add cron information, clean up old cruft that isnt needed.
Add cron information, clean up old cruft that isnt needed.
Python
bsd-2-clause
Justasic/StackSmash,Justasic/StackSmash
import os, datetime


def clean_up():
    # Set Django settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")

    # import our blog stuff
    from StackSmash.apps.blog.models import Comment, Post

    # if comments are not listed and older than a week, delete them.
    Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()

-     # comments = Comment.objects.filter(listed=False).order_by("created")
-
-     # If comments are older than a day, delete them.
-     # for comment in comments:
-     #     if comment.created.day < datetime.datetime.now().day:
-     #         Comment.objects.filter(listed=False, id=comment.id).delete()
-
-
if __name__ == "__main__":
    clean_up()
Add cron information, clean up old cruft that isnt needed.
## Code Before:
import os, datetime


def clean_up():
    # Set Django settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")

    # import our blog stuff
    from StackSmash.apps.blog.models import Comment, Post

    # if comments are not listed and older than a week, delete them.
    Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()

    # comments = Comment.objects.filter(listed=False).order_by("created")

    # If comments are older than a day, delete them.
    # for comment in comments:
    #     if comment.created.day < datetime.datetime.now().day:
    #         Comment.objects.filter(listed=False, id=comment.id).delete()


if __name__ == "__main__":
    clean_up()

## Instruction:
Add cron information, clean up old cruft that isnt needed.

## Code After:
import os, datetime


def clean_up():
    # Set Django settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")

    # import our blog stuff
    from StackSmash.apps.blog.models import Comment, Post

    # if comments are not listed and older than a week, delete them.
    Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()


if __name__ == "__main__":
    clean_up()
// ... existing code ...
if __name__ == "__main__":
// ... rest of the code ...