commit: stringlengths 40 to 40
old_file: stringlengths 4 to 234
new_file: stringlengths 4 to 234
old_contents: stringlengths 10 to 3.01k
new_contents: stringlengths 19 to 3.38k
subject: stringlengths 16 to 736
message: stringlengths 17 to 2.63k
lang: stringclasses, 4 values
license: stringclasses, 13 values
repos: stringlengths 5 to 82.6k
config: stringclasses, 4 values
content: stringlengths 134 to 4.41k
fuzzy_diff: stringlengths 29 to 3.44k
f0246b9897d89c1ec6f2361bbb488c4e162e5c5e
reddit_liveupdate/utils.py
reddit_liveupdate/utils.py
import itertools import pytz from babel.dates import format_time from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, )
import datetime import itertools import pytz from babel.dates import format_time, format_datetime from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) today = datetime.datetime.now(display_tz).date() date = dt.astimezone(display_tz).date() if date == today: return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) elif today - date < datetime.timedelta(days=365): return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM HH:mm z", locale=c.locale, ) else: return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM YYYY HH:mm z", locale=c.locale, )
Make timestamps more specific as temporal context fades.
Make timestamps more specific as temporal context fades. Fixes #6.
Python
bsd-3-clause
madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate
python
## Code Before: import itertools import pytz from babel.dates import format_time from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) ## Instruction: Make timestamps more specific as temporal context fades. Fixes #6. ## Code After: import datetime import itertools import pytz from babel.dates import format_time, format_datetime from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) today = datetime.datetime.now(display_tz).date() date = dt.astimezone(display_tz).date() if date == today: return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) elif today - date < datetime.timedelta(days=365): return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM HH:mm z", locale=c.locale, ) else: return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM YYYY HH:mm z", locale=c.locale, )
// ... existing code ... import datetime import itertools import pytz from babel.dates import format_time, format_datetime from pylons import c // ... modified code ... def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) today = datetime.datetime.now(display_tz).date() date = dt.astimezone(display_tz).date() if date == today: return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) elif today - date < datetime.timedelta(days=365): return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM HH:mm z", locale=c.locale, ) else: return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM YYYY HH:mm z", locale=c.locale, ) // ... rest of the code ...
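The change above depends on request-scoped objects from pylons (`c.liveupdate_event`, `c.locale`), so it does not run on its own. A minimal standalone sketch of the same tiered formatting, assuming only babel and pytz and using an illustrative fixed timezone and locale:

```python
import datetime

import pytz
from babel.dates import format_time, format_datetime

DISPLAY_TZ = pytz.timezone("Europe/London")  # illustrative timezone
LOCALE = "en"                                # illustrative locale


def pretty_time(dt):
    today = datetime.datetime.now(DISPLAY_TZ).date()
    date = dt.astimezone(DISPLAY_TZ).date()
    if date == today:
        # Same day: the time of day alone is enough context.
        return format_time(dt, "HH:mm z", tzinfo=DISPLAY_TZ, locale=LOCALE)
    elif today - date < datetime.timedelta(days=365):
        # Within the last year: add day and month.
        return format_datetime(dt, "dd MMM HH:mm z", tzinfo=DISPLAY_TZ, locale=LOCALE)
    # Older than a year: spell out the year as well.
    return format_datetime(dt, "dd MMM yyyy HH:mm z", tzinfo=DISPLAY_TZ, locale=LOCALE)


now = datetime.datetime.now(pytz.utc)
print(pretty_time(now))                                 # e.g. "14:02 GMT"
print(pretty_time(now - datetime.timedelta(days=40)))   # e.g. "05 Mar 14:02 GMT"
print(pretty_time(now - datetime.timedelta(days=500)))  # e.g. "30 Nov 2023 14:02 GMT"
```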
559c89fcbf5560c5d72ef1824abe1322bf894dc8
StarFlight/Classes/SCStarFlightPushClient.h
StarFlight/Classes/SCStarFlightPushClient.h
// // StarFlightPushClient.h // // Created by Starcut Software on 4/30/13. // Copyright (c) Starcut. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN extern NSString *const SCStarFlightClientUUIDNotification; @interface SCStarFlightPushClient : NSObject <NSURLConnectionDelegate> - (instancetype)initWithAppID:(NSString *)appID clientSecret:(NSString *)clientSecret; - (void)registerWithToken:(NSString *)token; - (void)registerWithToken:(NSString *)token clientUUID:(NSString *)clientUUID tags:(NSArray<NSString *> *)tags timePreferences:(NSDictionary *)timePreferencesDict; - (void)registerWithToken:(NSString *)token tags:(NSArray<NSString *> *)tags; - (void)unregisterWithToken:(NSString *)token tags:(nullable NSArray<NSString *> *)tags; - (void)openedMessageWithUUID:(NSString *)messageUUID deviceToken:(NSString *)deviceToken; NS_ASSUME_NONNULL_END @end
// // StarFlightPushClient.h // // Created by Starcut Software on 4/30/13. // Copyright (c) Starcut. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN extern NSString *const SCStarFlightClientUUIDNotification; @interface SCStarFlightPushClient : NSObject <NSURLConnectionDelegate> - (instancetype)initWithAppID:(NSString *)appID clientSecret:(NSString *)clientSecret; - (void)registerWithToken:(NSString *)token; - (void)registerWithToken:(NSString *)token clientUUID:(nullable NSString *)clientUUID tags:(nullable NSArray<NSString *> *)tags timePreferences:(nullable NSDictionary *)timePreferencesDict; - (void)registerWithToken:(NSString *)token tags:(NSArray<NSString *> *)tags; - (void)unregisterWithToken:(NSString *)token tags:(nullable NSArray<NSString *> *)tags; - (void)openedMessageWithUUID:(NSString *)messageUUID deviceToken:(NSString *)deviceToken; NS_ASSUME_NONNULL_END @end
Allow optional(nullable) parameters to register starflight-client
Allow optional(nullable) parameters to register starflight-client
C
mit
StarcutFinland/StarFlight
c
## Code Before: // // StarFlightPushClient.h // // Created by Starcut Software on 4/30/13. // Copyright (c) Starcut. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN extern NSString *const SCStarFlightClientUUIDNotification; @interface SCStarFlightPushClient : NSObject <NSURLConnectionDelegate> - (instancetype)initWithAppID:(NSString *)appID clientSecret:(NSString *)clientSecret; - (void)registerWithToken:(NSString *)token; - (void)registerWithToken:(NSString *)token clientUUID:(NSString *)clientUUID tags:(NSArray<NSString *> *)tags timePreferences:(NSDictionary *)timePreferencesDict; - (void)registerWithToken:(NSString *)token tags:(NSArray<NSString *> *)tags; - (void)unregisterWithToken:(NSString *)token tags:(nullable NSArray<NSString *> *)tags; - (void)openedMessageWithUUID:(NSString *)messageUUID deviceToken:(NSString *)deviceToken; NS_ASSUME_NONNULL_END @end ## Instruction: Allow optional(nullable) parameters to register starflight-client ## Code After: // // StarFlightPushClient.h // // Created by Starcut Software on 4/30/13. // Copyright (c) Starcut. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN extern NSString *const SCStarFlightClientUUIDNotification; @interface SCStarFlightPushClient : NSObject <NSURLConnectionDelegate> - (instancetype)initWithAppID:(NSString *)appID clientSecret:(NSString *)clientSecret; - (void)registerWithToken:(NSString *)token; - (void)registerWithToken:(NSString *)token clientUUID:(nullable NSString *)clientUUID tags:(nullable NSArray<NSString *> *)tags timePreferences:(nullable NSDictionary *)timePreferencesDict; - (void)registerWithToken:(NSString *)token tags:(NSArray<NSString *> *)tags; - (void)unregisterWithToken:(NSString *)token tags:(nullable NSArray<NSString *> *)tags; - (void)openedMessageWithUUID:(NSString *)messageUUID deviceToken:(NSString *)deviceToken; NS_ASSUME_NONNULL_END @end
# ... existing code ... - (instancetype)initWithAppID:(NSString *)appID clientSecret:(NSString *)clientSecret; - (void)registerWithToken:(NSString *)token; - (void)registerWithToken:(NSString *)token clientUUID:(nullable NSString *)clientUUID tags:(nullable NSArray<NSString *> *)tags timePreferences:(nullable NSDictionary *)timePreferencesDict; - (void)registerWithToken:(NSString *)token tags:(NSArray<NSString *> *)tags; - (void)unregisterWithToken:(NSString *)token tags:(nullable NSArray<NSString *> *)tags; - (void)openedMessageWithUUID:(NSString *)messageUUID deviceToken:(NSString *)deviceToken; # ... rest of the code ...
03d62abc0f48e49e1bfd672ab6c7c60cd8f6fef5
users/models.py
users/models.py
from django.contrib.auth.models import AbstractUser from django.db import models class Person(AbstractUser): description = models.TextField(blank=True) def __str__(self): return "User(<{}>}".format(self.email)
from django.contrib.auth.models import AbstractUser from django.db import models class Person(AbstractUser): description = models.TextField(blank=True) def __str__(self): if self.email: return "User(<{}>)".format(self.email) return "User(<{}>)".format(self.username)
Fix return string from user model
Fix return string from user model Also return username if there is no email address set
Python
mit
Nikola-K/django-template,Nikola-K/django-template
python
## Code Before: from django.contrib.auth.models import AbstractUser from django.db import models class Person(AbstractUser): description = models.TextField(blank=True) def __str__(self): return "User(<{}>}".format(self.email) ## Instruction: Fix return string from user model Also return username if there is no email address set ## Code After: from django.contrib.auth.models import AbstractUser from django.db import models class Person(AbstractUser): description = models.TextField(blank=True) def __str__(self): if self.email: return "User(<{}>)".format(self.email) return "User(<{}>)".format(self.username)
// ... existing code ... description = models.TextField(blank=True) def __str__(self): if self.email: return "User(<{}>)".format(self.email) return "User(<{}>)".format(self.username) // ... rest of the code ...
8dcf6c373316d21399fa1edd276cea357fea75fb
groundstation/sockets/stream_socket.py
groundstation/sockets/stream_socket.py
import socket import groundstation.logger log = groundstation.logger.getLogger(__name__) from groundstation.peer_socket import PeerSocket class StreamSocket(object): """Wraps a TCP socket""" def __init__(self): self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # XXX Implement the queue as a seperate class/ self.write_queue = [] def fileno(self): """Return the underlying socket to make select() work""" return self._sock.fileno() @property def socket(self): return self._sock def accept(self): p = self._sock.accept() log.info("Accepted a connection from %s" % repr(p[1])) return PeerSocket.from_accept(p) def enqueue(self, data): """Enqueues data for writing inside the select loop""" self.write_queue.insert(0, data) def send(self): data = self.write_queue.pop() log.info("Attempting to write %i bytes" % (len(data))) self._sock.send(data) def has_data_ready(self): """(bool) does this socket have enqueued data ready""" return len(self.write_queue) > 0
import socket import groundstation.logger log = groundstation.logger.getLogger(__name__) from groundstation.peer_socket import PeerSocket class StreamSocket(object): """Wraps a TCP socket""" def __init__(self): self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # XXX Implement the queue as a seperate class/ self.write_queue = [] def fileno(self): """Return the underlying socket to make select() work""" return self._sock.fileno() @property def socket(self): return self._sock def accept(self): p = self._sock.accept() log.info("Accepted a connection from %s" % repr(p[1])) return PeerSocket.from_accept(p) def enqueue(self, data): """Enqueues data for writing inside the select loop""" if hasattr(data, "SerializeToString"): data = data.SerializeToString() self.write_queue.insert(0, data) def send(self): data = self.write_queue.pop() log.info("Attempting to write %i bytes" % (len(data))) self._sock.send(data) def has_data_ready(self): """(bool) does this socket have enqueued data ready""" return len(self.write_queue) > 0
Support being given protobuf Messages
Support being given protobuf Messages
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
python
## Code Before: import socket import groundstation.logger log = groundstation.logger.getLogger(__name__) from groundstation.peer_socket import PeerSocket class StreamSocket(object): """Wraps a TCP socket""" def __init__(self): self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # XXX Implement the queue as a seperate class/ self.write_queue = [] def fileno(self): """Return the underlying socket to make select() work""" return self._sock.fileno() @property def socket(self): return self._sock def accept(self): p = self._sock.accept() log.info("Accepted a connection from %s" % repr(p[1])) return PeerSocket.from_accept(p) def enqueue(self, data): """Enqueues data for writing inside the select loop""" self.write_queue.insert(0, data) def send(self): data = self.write_queue.pop() log.info("Attempting to write %i bytes" % (len(data))) self._sock.send(data) def has_data_ready(self): """(bool) does this socket have enqueued data ready""" return len(self.write_queue) > 0 ## Instruction: Support being given protobuf Messages ## Code After: import socket import groundstation.logger log = groundstation.logger.getLogger(__name__) from groundstation.peer_socket import PeerSocket class StreamSocket(object): """Wraps a TCP socket""" def __init__(self): self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # XXX Implement the queue as a seperate class/ self.write_queue = [] def fileno(self): """Return the underlying socket to make select() work""" return self._sock.fileno() @property def socket(self): return self._sock def accept(self): p = self._sock.accept() log.info("Accepted a connection from %s" % repr(p[1])) return PeerSocket.from_accept(p) def enqueue(self, data): """Enqueues data for writing inside the select loop""" if hasattr(data, "SerializeToString"): data = data.SerializeToString() self.write_queue.insert(0, data) def send(self): data = self.write_queue.pop() log.info("Attempting to write %i bytes" % (len(data))) self._sock.send(data) def has_data_ready(self): """(bool) does this socket have enqueued data ready""" return len(self.write_queue) > 0
# ... existing code ... def enqueue(self, data): """Enqueues data for writing inside the select loop""" if hasattr(data, "SerializeToString"): data = data.SerializeToString() self.write_queue.insert(0, data) def send(self): # ... rest of the code ...
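A small self-contained sketch of the duck typing used in `enqueue` above: any object exposing `SerializeToString()` is serialized before it enters the write queue. `FakeMessage` and `WriteQueue` are stand-ins for a compiled protobuf message and the socket class, not part of the groundstation code:

```python
class FakeMessage(object):
    """Stand-in for a compiled protobuf message; only SerializeToString matters here."""
    def __init__(self, payload):
        self.payload = payload

    def SerializeToString(self):
        return self.payload.encode("utf-8")


class WriteQueue(object):
    def __init__(self):
        self.write_queue = []

    def enqueue(self, data):
        # Same duck typing as StreamSocket.enqueue: serialize if the object knows how.
        if hasattr(data, "SerializeToString"):
            data = data.SerializeToString()
        self.write_queue.insert(0, data)


q = WriteQueue()
q.enqueue(b"plain bytes still pass through untouched")
q.enqueue(FakeMessage("message objects are serialized on the way in"))
print(all(isinstance(item, bytes) for item in q.write_queue))  # True
```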
e1b0222c8a3ed39bf76af10484a94aa4cfe5adc8
googlesearch/templatetags/search_tags.py
googlesearch/templatetags/search_tags.py
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
Remove last_page not needed anymore.
Remove last_page not needed anymore.
Python
mit
hzdg/django-google-search,hzdg/django-google-search
python
## Code Before: import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context ## Instruction: Remove last_page not needed anymore. ## Code After: import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
... max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 ...
647707293524440f014ed0a3ef7d4322a96775e4
tests/example_app/flask_app.py
tests/example_app/flask_app.py
import flask from pale.adapters import flask as pale_flask_adapter from tests.example_app import api def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app
import flask from pale.adapters import flask as pale_flask_adapter from pale.config import authenticator, context_creator from tests.example_app import api @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" return context @context_creator def create_pale_context(endpoint,request): return pale_flask_adapter.DefaultFlaskContext(endpoint, request) def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app
Add authenticator and context creator to example app
Add authenticator and context creator to example app
Python
mit
Loudr/pale
python
## Code Before: import flask from pale.adapters import flask as pale_flask_adapter from tests.example_app import api def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app ## Instruction: Add authenticator and context creator to example app ## Code After: import flask from pale.adapters import flask as pale_flask_adapter from pale.config import authenticator, context_creator from tests.example_app import api @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" return context @context_creator def create_pale_context(endpoint,request): return pale_flask_adapter.DefaultFlaskContext(endpoint, request) def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app
// ... existing code ... import flask from pale.adapters import flask as pale_flask_adapter from pale.config import authenticator, context_creator from tests.example_app import api @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" return context @context_creator def create_pale_context(endpoint,request): return pale_flask_adapter.DefaultFlaskContext(endpoint, request) def create_pale_flask_app(): // ... rest of the code ...
0c3c3921897816f0c7c1c74ad0b52a25cef5b742
tests/compat.py
tests/compat.py
import sys if sys.version_info.major < 3: import unittest2 as unittest else: import unittest
from evelink.thirdparty.six import PY2 if PY2: import unittest2 as unittest else: import unittest
Use six for easy version info.
[PY3] Use six for easy version info.
Python
mit
Morloth1274/EVE-Online-POCO-manager,FashtimeDotCom/evelink,ayust/evelink,zigdon/evelink,bastianh/evelink
python
## Code Before: import sys if sys.version_info.major < 3: import unittest2 as unittest else: import unittest ## Instruction: [PY3] Use six for easy version info. ## Code After: from evelink.thirdparty.six import PY2 if PY2: import unittest2 as unittest else: import unittest
// ... existing code ... from evelink.thirdparty.six import PY2 if PY2: import unittest2 as unittest else: import unittest // ... rest of the code ...
f90bb76d0ea96d1a6115fec0f5a922832b02759c
Interfaces/WorldLogicInterface.h
Interfaces/WorldLogicInterface.h
/** * For conditions of distribution and use, see copyright notice in license.txt * * @file WorldLogicInterface.h * @brief */ #include "ServiceInterface.h" #ifndef incl_Interfaces_WorldLogicInterface_h #define incl_Interfaces_WorldLogicInterface_h #include "ForwardDefines.h" #include <QObject> class QString; namespace Foundation { class WorldLogicInterface : public QObject, public ServiceInterface { Q_OBJECT public: /// Default constructor. WorldLogicInterface() {} /// Destructor. virtual ~WorldLogicInterface() {} /// Returns user's avatar entity. virtual Scene::EntityPtr GetUserAvatarEntity() const = 0; /// Returns currently active camera entity. virtual Scene::EntityPtr GetCameraEntity() const = 0; /// Returns entity with certain entity component in it or null if not found. /// @param entity_id Entity ID. /// @param component Type name of the component. virtual Scene::EntityPtr GetEntityWithComponent(uint entity_id, const QString &component) const = 0; /// Hack function for getting EC_AvatarAppearance info to UiModule virtual const QString &GetAvatarAppearanceProperty(const QString &name) const = 0; signals: /// Emitted just before we start to delete world (scene). void AboutToDeleteWorld(); }; } #endif
/** * For conditions of distribution and use, see copyright notice in license.txt * * @file WorldLogicInterface.h * @brief */ #ifndef incl_Interfaces_WorldLogicInterface_h #define incl_Interfaces_WorldLogicInterface_h #include "ServiceInterface.h" #include "ForwardDefines.h" #include <QObject> class QString; namespace Foundation { class WorldLogicInterface : public QObject, public ServiceInterface { Q_OBJECT public: /// Default constructor. WorldLogicInterface() {} /// Destructor. virtual ~WorldLogicInterface() {} /// Returns user's avatar entity. virtual Scene::EntityPtr GetUserAvatarEntity() const = 0; /// Returns currently active camera entity. virtual Scene::EntityPtr GetCameraEntity() const = 0; /// Returns entity with certain entity component in it or null if not found. /// @param entity_id Entity ID. /// @param component Type name of the component. virtual Scene::EntityPtr GetEntityWithComponent(uint entity_id, const QString &component) const = 0; /// Hack function for getting EC_AvatarAppearance info to UiModule virtual const QString &GetAvatarAppearanceProperty(const QString &name) const = 0; signals: /// Emitted just before we start to delete world (scene). void AboutToDeleteWorld(); }; } #endif
Move include within include guards.
Move include within include guards.
C
apache-2.0
antont/tundra,jesterKing/naali,antont/tundra,realXtend/tundra,BogusCurry/tundra,realXtend/tundra,pharos3d/tundra,pharos3d/tundra,BogusCurry/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,jesterKing/naali,antont/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,jesterKing/naali,realXtend/tundra,BogusCurry/tundra,pharos3d/tundra,antont/tundra,jesterKing/naali,AlphaStaxLLC/tundra,AlphaStaxLLC/tundra,BogusCurry/tundra,realXtend/tundra,antont/tundra,jesterKing/naali,pharos3d/tundra,AlphaStaxLLC/tundra,jesterKing/naali,realXtend/tundra,pharos3d/tundra,antont/tundra,pharos3d/tundra,realXtend/tundra,AlphaStaxLLC/tundra,antont/tundra,jesterKing/naali
c
## Code Before: /** * For conditions of distribution and use, see copyright notice in license.txt * * @file WorldLogicInterface.h * @brief */ #include "ServiceInterface.h" #ifndef incl_Interfaces_WorldLogicInterface_h #define incl_Interfaces_WorldLogicInterface_h #include "ForwardDefines.h" #include <QObject> class QString; namespace Foundation { class WorldLogicInterface : public QObject, public ServiceInterface { Q_OBJECT public: /// Default constructor. WorldLogicInterface() {} /// Destructor. virtual ~WorldLogicInterface() {} /// Returns user's avatar entity. virtual Scene::EntityPtr GetUserAvatarEntity() const = 0; /// Returns currently active camera entity. virtual Scene::EntityPtr GetCameraEntity() const = 0; /// Returns entity with certain entity component in it or null if not found. /// @param entity_id Entity ID. /// @param component Type name of the component. virtual Scene::EntityPtr GetEntityWithComponent(uint entity_id, const QString &component) const = 0; /// Hack function for getting EC_AvatarAppearance info to UiModule virtual const QString &GetAvatarAppearanceProperty(const QString &name) const = 0; signals: /// Emitted just before we start to delete world (scene). void AboutToDeleteWorld(); }; } #endif ## Instruction: Move include within include guards. ## Code After: /** * For conditions of distribution and use, see copyright notice in license.txt * * @file WorldLogicInterface.h * @brief */ #ifndef incl_Interfaces_WorldLogicInterface_h #define incl_Interfaces_WorldLogicInterface_h #include "ServiceInterface.h" #include "ForwardDefines.h" #include <QObject> class QString; namespace Foundation { class WorldLogicInterface : public QObject, public ServiceInterface { Q_OBJECT public: /// Default constructor. WorldLogicInterface() {} /// Destructor. virtual ~WorldLogicInterface() {} /// Returns user's avatar entity. virtual Scene::EntityPtr GetUserAvatarEntity() const = 0; /// Returns currently active camera entity. virtual Scene::EntityPtr GetCameraEntity() const = 0; /// Returns entity with certain entity component in it or null if not found. /// @param entity_id Entity ID. /// @param component Type name of the component. virtual Scene::EntityPtr GetEntityWithComponent(uint entity_id, const QString &component) const = 0; /// Hack function for getting EC_AvatarAppearance info to UiModule virtual const QString &GetAvatarAppearanceProperty(const QString &name) const = 0; signals: /// Emitted just before we start to delete world (scene). void AboutToDeleteWorld(); }; } #endif
// ... existing code ... * @brief */ #ifndef incl_Interfaces_WorldLogicInterface_h #define incl_Interfaces_WorldLogicInterface_h #include "ServiceInterface.h" #include "ForwardDefines.h" #include <QObject> // ... rest of the code ...
efa7f27d6c6ae795abb99a0aca1de0a6d30733ea
src/test/java/org/mapdb/issues/Issue769_Map_putOnly.kt
src/test/java/org/mapdb/issues/Issue769_Map_putOnly.kt
package org.mapdb.issues import org.junit.Test import org.mapdb.* class Issue769_Map_putOnly{ object valueSer: Serializer<String> { val ser = ArrayList<String>() val deser = ArrayList<String>() override fun serialize(out: DataOutput2, value: String) { ser += value out.writeUTF(value) } override fun deserialize(input: DataInput2, available: Int): String { val v = input.readUTF() deser += v return v } } @Test fun hashMap(){ val m = DBMaker.memoryDB().make() .hashMap("map", Serializer.INTEGER, valueSer) .create() check(m) } @Test fun treeMap(){ val m = DBMaker.memoryDB().make() .treeMap("map", Serializer.INTEGER, valueSer) .valuesOutsideNodesEnable() .create() check(m) } private fun check(m: MapExtra<Int, String>) { m.put(1, "one") valueSer.deser.clear() valueSer.ser.clear() m.putOnly(1, "two") assert(valueSer.ser == arrayListOf("two")) assert(valueSer.deser.isEmpty()) } }
package org.mapdb.issues import org.junit.Test import org.mapdb.* class Issue769_Map_putOnly : Serializer<String>{ val ser = ArrayList<String>() val deser = ArrayList<String>() override fun serialize(out: DataOutput2, value: String) { ser += value out.writeUTF(value) } override fun deserialize(input: DataInput2, available: Int): String { val v = input.readUTF() deser += v return v } @Test fun hashMap(){ val m = DBMaker.memoryDB().make() .hashMap("map", Serializer.INTEGER, this) .create() check(m) } @Test fun treeMap(){ val m = DBMaker.memoryDB().make() .treeMap("map", Serializer.INTEGER, this) .valuesOutsideNodesEnable() .create() check(m) } private fun check(m: MapExtra<Int, String>) { m.put(1, "one") deser.clear() ser.clear() m.putOnly(1, "two") assert(ser == arrayListOf("two")) assert(deser.isEmpty()) } }
Fix race condition in unit test
Fix race condition in unit test
Kotlin
apache-2.0
jankotek/mapdb,jankotek/MapDB,jankotek/mapdb,jankotek/MapDB
kotlin
## Code Before: package org.mapdb.issues import org.junit.Test import org.mapdb.* class Issue769_Map_putOnly{ object valueSer: Serializer<String> { val ser = ArrayList<String>() val deser = ArrayList<String>() override fun serialize(out: DataOutput2, value: String) { ser += value out.writeUTF(value) } override fun deserialize(input: DataInput2, available: Int): String { val v = input.readUTF() deser += v return v } } @Test fun hashMap(){ val m = DBMaker.memoryDB().make() .hashMap("map", Serializer.INTEGER, valueSer) .create() check(m) } @Test fun treeMap(){ val m = DBMaker.memoryDB().make() .treeMap("map", Serializer.INTEGER, valueSer) .valuesOutsideNodesEnable() .create() check(m) } private fun check(m: MapExtra<Int, String>) { m.put(1, "one") valueSer.deser.clear() valueSer.ser.clear() m.putOnly(1, "two") assert(valueSer.ser == arrayListOf("two")) assert(valueSer.deser.isEmpty()) } } ## Instruction: Fix race condition in unit test ## Code After: package org.mapdb.issues import org.junit.Test import org.mapdb.* class Issue769_Map_putOnly : Serializer<String>{ val ser = ArrayList<String>() val deser = ArrayList<String>() override fun serialize(out: DataOutput2, value: String) { ser += value out.writeUTF(value) } override fun deserialize(input: DataInput2, available: Int): String { val v = input.readUTF() deser += v return v } @Test fun hashMap(){ val m = DBMaker.memoryDB().make() .hashMap("map", Serializer.INTEGER, this) .create() check(m) } @Test fun treeMap(){ val m = DBMaker.memoryDB().make() .treeMap("map", Serializer.INTEGER, this) .valuesOutsideNodesEnable() .create() check(m) } private fun check(m: MapExtra<Int, String>) { m.put(1, "one") deser.clear() ser.clear() m.putOnly(1, "two") assert(ser == arrayListOf("two")) assert(deser.isEmpty()) } }
... import org.junit.Test import org.mapdb.* class Issue769_Map_putOnly : Serializer<String>{ val ser = ArrayList<String>() val deser = ArrayList<String>() override fun serialize(out: DataOutput2, value: String) { ser += value out.writeUTF(value) } override fun deserialize(input: DataInput2, available: Int): String { val v = input.readUTF() deser += v return v } @Test fun hashMap(){ val m = DBMaker.memoryDB().make() .hashMap("map", Serializer.INTEGER, this) .create() check(m) ... @Test fun treeMap(){ val m = DBMaker.memoryDB().make() .treeMap("map", Serializer.INTEGER, this) .valuesOutsideNodesEnable() .create() ... private fun check(m: MapExtra<Int, String>) { m.put(1, "one") deser.clear() ser.clear() m.putOnly(1, "two") assert(ser == arrayListOf("two")) assert(deser.isEmpty()) } } ...
ee102dafde0ba2713a9094d447b76de247990ddd
sample/src/main/java/com/mapzen/android/sample/BasicMapzenActivity.java
sample/src/main/java/com/mapzen/android/sample/BasicMapzenActivity.java
package com.mapzen.android.sample; import com.mapzen.android.MapFragment; import com.mapzen.android.MapManager; import com.mapzen.tangram.MapController; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; /** * Basic SDK demo, tracks user's current location on map. */ public class BasicMapzenActivity extends AppCompatActivity { MapFragment mapFragment; MapController mapController; MapManager mapManager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sample_mapzen); mapFragment = (MapFragment) getSupportFragmentManager().findFragmentById(R.id.fragment); mapController = mapFragment.getMap(); mapManager = mapFragment.getMapManager(); configureMap(); } private void configureMap() { mapController.setMapZoom(17); mapManager.setMyLocationEnabled(true); } }
package com.mapzen.android.sample; import com.mapzen.android.MapFragment; import com.mapzen.android.MapManager; import com.mapzen.tangram.MapController; import com.mapzen.tangram.MapView; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; /** * Basic SDK demo, tracks user's current location on map. */ public class BasicMapzenActivity extends AppCompatActivity { MapFragment mapFragment; MapController mapController; MapManager mapManager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sample_mapzen); mapFragment = (MapFragment) getSupportFragmentManager().findFragmentById(R.id.fragment); mapFragment.getMapAsync(new MapView.OnMapReadyCallback() { @Override public void onMapReady(MapController mapController) { BasicMapzenActivity.this.mapController = mapController; configureMap(); } }); } private void configureMap() { mapManager = mapFragment.getMapManager(); mapManager.setMyLocationEnabled(true); } }
Use getMapAsync, original file was renamed
Use getMapAsync, original file was renamed
Java
apache-2.0
mapzen/android,mapzen/android
java
## Code Before: package com.mapzen.android.sample; import com.mapzen.android.MapFragment; import com.mapzen.android.MapManager; import com.mapzen.tangram.MapController; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; /** * Basic SDK demo, tracks user's current location on map. */ public class BasicMapzenActivity extends AppCompatActivity { MapFragment mapFragment; MapController mapController; MapManager mapManager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sample_mapzen); mapFragment = (MapFragment) getSupportFragmentManager().findFragmentById(R.id.fragment); mapController = mapFragment.getMap(); mapManager = mapFragment.getMapManager(); configureMap(); } private void configureMap() { mapController.setMapZoom(17); mapManager.setMyLocationEnabled(true); } } ## Instruction: Use getMapAsync, original file was renamed ## Code After: package com.mapzen.android.sample; import com.mapzen.android.MapFragment; import com.mapzen.android.MapManager; import com.mapzen.tangram.MapController; import com.mapzen.tangram.MapView; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; /** * Basic SDK demo, tracks user's current location on map. */ public class BasicMapzenActivity extends AppCompatActivity { MapFragment mapFragment; MapController mapController; MapManager mapManager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sample_mapzen); mapFragment = (MapFragment) getSupportFragmentManager().findFragmentById(R.id.fragment); mapFragment.getMapAsync(new MapView.OnMapReadyCallback() { @Override public void onMapReady(MapController mapController) { BasicMapzenActivity.this.mapController = mapController; configureMap(); } }); } private void configureMap() { mapManager = mapFragment.getMapManager(); mapManager.setMyLocationEnabled(true); } }
// ... existing code ... import com.mapzen.android.MapFragment; import com.mapzen.android.MapManager; import com.mapzen.tangram.MapController; import com.mapzen.tangram.MapView; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; // ... modified code ... setContentView(R.layout.activity_sample_mapzen); mapFragment = (MapFragment) getSupportFragmentManager().findFragmentById(R.id.fragment); mapFragment.getMapAsync(new MapView.OnMapReadyCallback() { @Override public void onMapReady(MapController mapController) { BasicMapzenActivity.this.mapController = mapController; configureMap(); } }); } private void configureMap() { mapManager = mapFragment.getMapManager(); mapManager.setMyLocationEnabled(true); } } // ... rest of the code ...
42bfa6b69697c0c093a961df5708f477288a6efa
icekit/plugins/twitter_embed/forms.py
icekit/plugins/twitter_embed/forms.py
import re from django import forms from fluent_contents.forms import ContentItemForm class TwitterEmbedAdminForm(ContentItemForm): def clean_twitter_url(self): """ Make sure the URL provided matches the twitter URL format. """ url = self.cleaned_data['twitter_url'] if url: pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+') if not pattern.match(url): raise forms.ValidationError('Please provide a valid twitter link.') return url
import re from django import forms from fluent_contents.forms import ContentItemForm from icekit.plugins.twitter_embed.models import TwitterEmbedItem class TwitterEmbedAdminForm(ContentItemForm): class Meta: model = TwitterEmbedItem fields = '__all__' def clean_twitter_url(self): """ Make sure the URL provided matches the twitter URL format. """ url = self.cleaned_data['twitter_url'] if url: pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+') if not pattern.match(url): raise forms.ValidationError('Please provide a valid twitter link.') return url
Add model and firld information to form.
Add model and firld information to form.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
python
## Code Before: import re from django import forms from fluent_contents.forms import ContentItemForm class TwitterEmbedAdminForm(ContentItemForm): def clean_twitter_url(self): """ Make sure the URL provided matches the twitter URL format. """ url = self.cleaned_data['twitter_url'] if url: pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+') if not pattern.match(url): raise forms.ValidationError('Please provide a valid twitter link.') return url ## Instruction: Add model and firld information to form. ## Code After: import re from django import forms from fluent_contents.forms import ContentItemForm from icekit.plugins.twitter_embed.models import TwitterEmbedItem class TwitterEmbedAdminForm(ContentItemForm): class Meta: model = TwitterEmbedItem fields = '__all__' def clean_twitter_url(self): """ Make sure the URL provided matches the twitter URL format. """ url = self.cleaned_data['twitter_url'] if url: pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+') if not pattern.match(url): raise forms.ValidationError('Please provide a valid twitter link.') return url
... import re from django import forms from fluent_contents.forms import ContentItemForm from icekit.plugins.twitter_embed.models import TwitterEmbedItem class TwitterEmbedAdminForm(ContentItemForm): class Meta: model = TwitterEmbedItem fields = '__all__' def clean_twitter_url(self): """ Make sure the URL provided matches the twitter URL format. ... raise forms.ValidationError('Please provide a valid twitter link.') return url ...
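The validation above can be exercised outside Django; a quick standalone check of the same pattern, with illustrative URLs:

```python
import re

# Same pattern as used in clean_twitter_url above.
TWITTER_STATUS = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')


def is_twitter_status_url(url):
    return bool(TWITTER_STATUS.match(url))


print(is_twitter_status_url("https://twitter.com/someuser/status/1234567890"))  # True
print(is_twitter_status_url("http://www.twitter.com/someuser/statuses/42"))     # True
print(is_twitter_status_url("https://twitter.com/someuser"))                    # False
```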
fb21faaec025a0a6ca2d98c8b2381902f3b1444a
pybug/align/lucaskanade/__init__.py
pybug/align/lucaskanade/__init__.py
import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation)
import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
Add GaborFourier to default import
Add GaborFourier to default import
Python
bsd-3-clause
menpo/menpo,yuxiang-zhou/menpo,grigorisg9gr/menpo,mozata/menpo,mozata/menpo,mozata/menpo,mozata/menpo,grigorisg9gr/menpo,menpo/menpo,menpo/menpo,jabooth/menpo-archive,jabooth/menpo-archive,jabooth/menpo-archive,yuxiang-zhou/menpo,grigorisg9gr/menpo,patricksnape/menpo,yuxiang-zhou/menpo,jabooth/menpo-archive,patricksnape/menpo,patricksnape/menpo
python
## Code Before: import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation) ## Instruction: Add GaborFourier to default import ## Code After: import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
... from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation) ...
eb4456b752313383a573bacfc102db9149ee1854
django_transfer/urls.py
django_transfer/urls.py
from __future__ import unicode_literals try: from django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url urlpatterns = patterns( '', url(r'^download/.*$', 'django_transfer.views.download', name='download'), url(r'^upload/$', 'django_transfer.views.upload', name='upload'), )
from __future__ import unicode_literals try: from django.conf.urls import url def patterns(*args): return args except ImportError: from django.conf.urls.defaults import patterns, url from django_transfer.views import download, upload urlpatterns = patterns( url(r'^download/.*$', download, name='download'), url(r'^upload/$', upload, name='upload'), )
Fix URL patterns for different Django versions.
Fix URL patterns for different Django versions.
Python
mit
smartfile/django-transfer
python
## Code Before: from __future__ import unicode_literals try: from django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url urlpatterns = patterns( '', url(r'^download/.*$', 'django_transfer.views.download', name='download'), url(r'^upload/$', 'django_transfer.views.upload', name='upload'), ) ## Instruction: Fix URL patterns for different Django versions. ## Code After: from __future__ import unicode_literals try: from django.conf.urls import url def patterns(*args): return args except ImportError: from django.conf.urls.defaults import patterns, url from django_transfer.views import download, upload urlpatterns = patterns( url(r'^download/.*$', download, name='download'), url(r'^upload/$', upload, name='upload'), )
... from __future__ import unicode_literals try: from django.conf.urls import url def patterns(*args): return args except ImportError: from django.conf.urls.defaults import patterns, url from django_transfer.views import download, upload urlpatterns = patterns( url(r'^download/.*$', download, name='download'), url(r'^upload/$', upload, name='upload'), ) ...
79162ce8caaf6d1f66666cdd52f677de812af47a
test/Preprocessor/macro_paste_bcpl_comment.c
test/Preprocessor/macro_paste_bcpl_comment.c
// RUN: clang-cc %s -Eonly 2>&1 | grep error #define COMM1 / ## / COMM1
// RUN: clang-cc %s -Eonly -fms-extensions=0 2>&1 | grep error #define COMM1 / ## / COMM1
Disable Microsoft extensions to fix failure on Windows.
Disable Microsoft extensions to fix failure on Windows. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@84893 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang
c
## Code Before: // RUN: clang-cc %s -Eonly 2>&1 | grep error #define COMM1 / ## / COMM1 ## Instruction: Disable Microsoft extensions to fix failure on Windows. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@84893 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: // RUN: clang-cc %s -Eonly -fms-extensions=0 2>&1 | grep error #define COMM1 / ## / COMM1
# ... existing code ... // RUN: clang-cc %s -Eonly -fms-extensions=0 2>&1 | grep error #define COMM1 / ## / COMM1 # ... rest of the code ...
01e4f3a271b47a3a978df86876bbc691d9cd121b
src/main/java/at/medunigraz/imi/bst/trec/model/Metrics.java
src/main/java/at/medunigraz/imi/bst/trec/model/Metrics.java
package at.medunigraz.imi.bst.trec.model; import java.util.Map; import java.util.Set; import java.util.TreeMap; public class Metrics { private Map<String, Double> metrics = new TreeMap<>(); public void put(String name, double value) { metrics.put(name, value); } public double getMetric(String name) { return metrics.getOrDefault(name, 0d); } public String getMetricsAsString() { StringBuilder sb = new StringBuilder(); Set<Map.Entry<String, Double>> entries = metrics.entrySet(); for (Map.Entry<String, Double> entry : entries) { sb.append(entry.getKey()); sb.append("="); sb.append(entry.getValue()); sb.append("\n"); } return sb.toString(); } }
package at.medunigraz.imi.bst.trec.model; import java.util.Map; import java.util.Set; import java.util.TreeMap; public class Metrics { private Map<String, Double> metrics = new TreeMap<>(); public void put(String name, double value) { metrics.put(name, value); } public double getMetric(String name) { return metrics.getOrDefault(name, 0d); } public String getMetricsAsString() { StringBuilder sb = new StringBuilder(); Set<Map.Entry<String, Double>> entries = metrics.entrySet(); for (Map.Entry<String, Double> entry : entries) { sb.append(entry.getKey()); sb.append("="); sb.append(entry.getValue()); sb.append("\n"); } return sb.toString(); } public double getNDCG() { return getMetric("ndcg"); } public double getRPrec() { return getMetric("Rprec"); } public double getInfAP() { return getMetric("infAP"); } public double getP10() { return getMetric("P_10"); } public double getF() { return getMetric("set_F"); } }
Add convenient methods to get some common metrics
Add convenient methods to get some common metrics
Java
mit
bst-mug/trec2017,michelole/trec2017,plopezgarcia/trec2017,bst-mug/trec2017,plopezgarcia/trec2017,plopezgarcia/trec2017,michelole/trec2017,bst-mug/trec2017,michelole/trec2017
java
## Code Before: package at.medunigraz.imi.bst.trec.model; import java.util.Map; import java.util.Set; import java.util.TreeMap; public class Metrics { private Map<String, Double> metrics = new TreeMap<>(); public void put(String name, double value) { metrics.put(name, value); } public double getMetric(String name) { return metrics.getOrDefault(name, 0d); } public String getMetricsAsString() { StringBuilder sb = new StringBuilder(); Set<Map.Entry<String, Double>> entries = metrics.entrySet(); for (Map.Entry<String, Double> entry : entries) { sb.append(entry.getKey()); sb.append("="); sb.append(entry.getValue()); sb.append("\n"); } return sb.toString(); } } ## Instruction: Add convenient methods to get some common metrics ## Code After: package at.medunigraz.imi.bst.trec.model; import java.util.Map; import java.util.Set; import java.util.TreeMap; public class Metrics { private Map<String, Double> metrics = new TreeMap<>(); public void put(String name, double value) { metrics.put(name, value); } public double getMetric(String name) { return metrics.getOrDefault(name, 0d); } public String getMetricsAsString() { StringBuilder sb = new StringBuilder(); Set<Map.Entry<String, Double>> entries = metrics.entrySet(); for (Map.Entry<String, Double> entry : entries) { sb.append(entry.getKey()); sb.append("="); sb.append(entry.getValue()); sb.append("\n"); } return sb.toString(); } public double getNDCG() { return getMetric("ndcg"); } public double getRPrec() { return getMetric("Rprec"); } public double getInfAP() { return getMetric("infAP"); } public double getP10() { return getMetric("P_10"); } public double getF() { return getMetric("set_F"); } }
// ... existing code ... return sb.toString(); } public double getNDCG() { return getMetric("ndcg"); } public double getRPrec() { return getMetric("Rprec"); } public double getInfAP() { return getMetric("infAP"); } public double getP10() { return getMetric("P_10"); } public double getF() { return getMetric("set_F"); } } // ... rest of the code ...
cd006f8d3885005e867255e63819fc8a5c7430bf
redactor/TextEditor.py
redactor/TextEditor.py
from tkinter import * class TextEditor(): def __init__(self): self.root = Tk() self.root.wm_title("BrickText") self.text_panel = Text(self.root) self.text_panel.pack(fill=BOTH, expand=YES) def start(self): self.root.mainloop() def get_root(self): return self.root def get_text_panel(self): return self.text_panel
from tkinter import * class TextEditor(): def __init__(self): self.root = Tk() self.root.wm_title("BrickText") self.text_panel = Text(self.root) self.text_panel.pack(fill=BOTH, expand=YES) def start(self): self.root.mainloop() def get_root(self): return self.root def get_text_widget(self): return self.editor def get_text_panel(self): return self.text_panel
Add getter for text widget
Add getter for text widget
Python
mit
BrickText/BrickText
python
## Code Before: from tkinter import * class TextEditor(): def __init__(self): self.root = Tk() self.root.wm_title("BrickText") self.text_panel = Text(self.root) self.text_panel.pack(fill=BOTH, expand=YES) def start(self): self.root.mainloop() def get_root(self): return self.root def get_text_panel(self): return self.text_panel ## Instruction: Add getter for text widget ## Code After: from tkinter import * class TextEditor(): def __init__(self): self.root = Tk() self.root.wm_title("BrickText") self.text_panel = Text(self.root) self.text_panel.pack(fill=BOTH, expand=YES) def start(self): self.root.mainloop() def get_root(self): return self.root def get_text_widget(self): return self.editor def get_text_panel(self): return self.text_panel
// ... existing code ... def get_root(self): return self.root def get_text_widget(self): return self.editor def get_text_panel(self): return self.text_panel // ... rest of the code ...
8d40cbd1d2cf431454dcfd9a9088be73687e7c1a
skimage/viewer/__init__.py
skimage/viewer/__init__.py
try: from .qt import QtGui as _QtGui except ImportError as e: raise ImportError('Viewer requires Qt') from .viewers import ImageViewer, CollectionViewer
import warnings try: from .viewers import ImageViewer, CollectionViewer except ImportError as e: warnings.warn('Viewer requires Qt')
Allow viewer package to import without Qt
Allow viewer package to import without Qt
Python
bsd-3-clause
pratapvardhan/scikit-image,paalge/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,pratapvardhan/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,bennlich/scikit-image,GaZ3ll3/scikit-image,blink1073/scikit-image,paalge/scikit-image,ofgulban/scikit-image,youprofit/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,emon10005/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,vighneshbirodkar/scikit-image,emon10005/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,Britefury/scikit-image,bsipocz/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,robintw/scikit-image,ClinicalGraphics/scikit-image,keflavich/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,newville/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,michaelpacer/scikit-image,Hiyorimi/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,ClinicalGraphics/scikit-image,oew1v07/scikit-image,michaelpacer/scikit-image
python
## Code Before: try: from .qt import QtGui as _QtGui except ImportError as e: raise ImportError('Viewer requires Qt') from .viewers import ImageViewer, CollectionViewer ## Instruction: Allow viewer package to import without Qt ## Code After: import warnings try: from .viewers import ImageViewer, CollectionViewer except ImportError as e: warnings.warn('Viewer requires Qt')
// ... existing code ... import warnings try: from .viewers import ImageViewer, CollectionViewer except ImportError as e: warnings.warn('Viewer requires Qt') // ... rest of the code ...
0406ddcb3e22f8f3eb3b1fdba702e41ebe8b5bf0
connector/tests/__init__.py
connector/tests/__init__.py
from . import test_session from . import test_event from . import test_job from . import test_queue from . import test_worker from . import test_backend from . import test_producer from . import test_connector from . import test_mapper from . import test_related_action fast_suite = [ ] checks = [ test_session, test_event, test_job, test_queue, test_worker, test_backend, test_producer, test_connector, test_mapper, test_related_action, ]
from . import test_session from . import test_event from . import test_job from . import test_queue from . import test_worker from . import test_backend from . import test_producer from . import test_connector from . import test_mapper from . import test_related_action
Remove deprecated fast_suite and check list for unit tests
Remove deprecated fast_suite and check list for unit tests
Python
agpl-3.0
OCA/connector,OCA/connector
python
## Code Before: from . import test_session from . import test_event from . import test_job from . import test_queue from . import test_worker from . import test_backend from . import test_producer from . import test_connector from . import test_mapper from . import test_related_action fast_suite = [ ] checks = [ test_session, test_event, test_job, test_queue, test_worker, test_backend, test_producer, test_connector, test_mapper, test_related_action, ] ## Instruction: Remove deprecated fast_suite and check list for unit tests ## Code After: from . import test_session from . import test_event from . import test_job from . import test_queue from . import test_worker from . import test_backend from . import test_producer from . import test_connector from . import test_mapper from . import test_related_action
# ... existing code ... from . import test_connector from . import test_mapper from . import test_related_action # ... rest of the code ...
1f1153dc7087845e1909bae381af106d39c13912
tests/pinocchio/lib.h
tests/pinocchio/lib.h
class PinocchioTest : public Test { Q_OBJECT public: PinocchioTest(QObject *parent = 0); virtual ~PinocchioTest(); static inline QLatin1String pinocchioBusName() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_BUS_NAME_BASE "pinocchio"); } static inline QLatin1String pinocchioObjectPath() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_OBJECT_PATH_BASE "pinocchio"); } bool waitForPinocchio(uint timeoutMs = 5000); protected: QString mPinocchioPath; QString mPinocchioCtlPath; QProcess mPinocchio; QEventLoop *mLoop; virtual void initTestCaseImpl(); virtual void cleanupTestCaseImpl(); protected Q_SLOTS: void gotNameOwner(QDBusPendingCallWatcher* watcher); void onNameOwnerChanged(const QString&, const QString&, const QString&); };
class PinocchioTest : public Test { Q_OBJECT public: PinocchioTest(QObject *parent = 0); virtual ~PinocchioTest(); static inline QLatin1String pinocchioBusName() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_BUS_NAME_BASE "pinocchio"); } static inline QLatin1String pinocchioObjectPath() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_OBJECT_PATH_BASE "pinocchio"); } bool waitForPinocchio(uint timeoutMs = 5000); protected: QString mPinocchioPath; QString mPinocchioCtlPath; QProcess mPinocchio; virtual void initTestCaseImpl(); virtual void cleanupTestCaseImpl(); protected Q_SLOTS: void gotNameOwner(QDBusPendingCallWatcher* watcher); void onNameOwnerChanged(const QString&, const QString&, const QString&); };
Remove spurious mLoop member from PinocchioTest
Remove spurious mLoop member from PinocchioTest Having moved mLoop into the parent class, the fact that there's another one here (uninitialized, and used for about half the functions) breaks the tests.
C
lgpl-2.1
TelepathyIM/telepathy-qt,special/telepathy-qt-upstream,tiagosh/telepathy-qt,TelepathyQt/telepathy-qt,anantkamath/telepathy-qt,anantkamath/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt4,tiagosh/telepathy-qt,TelepathyIM/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt,special/telepathy-qt-upstream,TelepathyIM/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt4,special/telepathy-qt-upstream,freedesktop-unofficial-mirror/telepathy__telepathy-qt,TelepathyIM/telepathy-qt,TelepathyQt/telepathy-qt,TelepathyQt/telepathy-qt,detrout/telepathy-qt,TelepathyQt/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt4,detrout/telepathy-qt,detrout/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt4,tiagosh/telepathy-qt,freedesktop-unofficial-mirror/telepathy__telepathy-qt,anantkamath/telepathy-qt,TelepathyIM/telepathy-qt,tiagosh/telepathy-qt
c
## Code Before: class PinocchioTest : public Test { Q_OBJECT public: PinocchioTest(QObject *parent = 0); virtual ~PinocchioTest(); static inline QLatin1String pinocchioBusName() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_BUS_NAME_BASE "pinocchio"); } static inline QLatin1String pinocchioObjectPath() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_OBJECT_PATH_BASE "pinocchio"); } bool waitForPinocchio(uint timeoutMs = 5000); protected: QString mPinocchioPath; QString mPinocchioCtlPath; QProcess mPinocchio; QEventLoop *mLoop; virtual void initTestCaseImpl(); virtual void cleanupTestCaseImpl(); protected Q_SLOTS: void gotNameOwner(QDBusPendingCallWatcher* watcher); void onNameOwnerChanged(const QString&, const QString&, const QString&); }; ## Instruction: Remove spurious mLoop member from PinocchioTest Having moved mLoop into the parent class, the fact that there's another one here (uninitialized, and used for about half the functions) breaks the tests. ## Code After: class PinocchioTest : public Test { Q_OBJECT public: PinocchioTest(QObject *parent = 0); virtual ~PinocchioTest(); static inline QLatin1String pinocchioBusName() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_BUS_NAME_BASE "pinocchio"); } static inline QLatin1String pinocchioObjectPath() { return QLatin1String( TELEPATHY_CONNECTION_MANAGER_OBJECT_PATH_BASE "pinocchio"); } bool waitForPinocchio(uint timeoutMs = 5000); protected: QString mPinocchioPath; QString mPinocchioCtlPath; QProcess mPinocchio; virtual void initTestCaseImpl(); virtual void cleanupTestCaseImpl(); protected Q_SLOTS: void gotNameOwner(QDBusPendingCallWatcher* watcher); void onNameOwnerChanged(const QString&, const QString&, const QString&); };
// ... existing code ... QString mPinocchioPath; QString mPinocchioCtlPath; QProcess mPinocchio; virtual void initTestCaseImpl(); // ... rest of the code ...
3cee41ff8a7af405fe3a6bfda214e4fe1a6d3c0f
oneflow/settings/snippets/db_production.py
oneflow/settings/snippets/db_production.py
DATABASES['default'] = dj_database_url.config( default='postgres://oneflow:8jxcWaAfPJT3mV@{0}' '/oneflow'.format(MAIN_SERVER)) mongoengine.connect('oneflow', host=MAIN_SERVER) REDIS_DB = 0 CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format( MAIN_SERVER, REDIS_DB) SESSION_REDIS_HOST = MAIN_SERVER SESSION_REDIS_DB = 2
DATABASES['default'] = dj_database_url.config( default='postgres://oneflow:8jxcWaAfPJT3mV@{0}' '/oneflow'.format(MAIN_SERVER)) mongoengine.connect('oneflow', host=MAIN_SERVER) REDIS_DB = 0 REDIS_TEST_DB = 9 CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format( MAIN_SERVER, REDIS_DB) SESSION_REDIS_HOST = MAIN_SERVER SESSION_REDIS_DB = 2
Add the test REDIS database.
Add the test REDIS database.
Python
agpl-3.0
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow
python
## Code Before: DATABASES['default'] = dj_database_url.config( default='postgres://oneflow:8jxcWaAfPJT3mV@{0}' '/oneflow'.format(MAIN_SERVER)) mongoengine.connect('oneflow', host=MAIN_SERVER) REDIS_DB = 0 CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format( MAIN_SERVER, REDIS_DB) SESSION_REDIS_HOST = MAIN_SERVER SESSION_REDIS_DB = 2 ## Instruction: Add the test REDIS database. ## Code After: DATABASES['default'] = dj_database_url.config( default='postgres://oneflow:8jxcWaAfPJT3mV@{0}' '/oneflow'.format(MAIN_SERVER)) mongoengine.connect('oneflow', host=MAIN_SERVER) REDIS_DB = 0 REDIS_TEST_DB = 9 CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format( MAIN_SERVER, REDIS_DB) SESSION_REDIS_HOST = MAIN_SERVER SESSION_REDIS_DB = 2
... mongoengine.connect('oneflow', host=MAIN_SERVER) REDIS_DB = 0 REDIS_TEST_DB = 9 CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format( MAIN_SERVER, REDIS_DB) ...
9ced61716167505875d3938ae01c08b61acc9392
randterrainpy/terrain.py
randterrainpy/terrain.py
"""This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value
"""This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value def __add__(self, other): """Add two terrains, height by height. Args: other (Terrain): Other terrain to add self to. Must have same dimensions as self. Returns: Terrain: Terrain of self and other added together. """ result = Terrain(self.width, self.length) for i in range(self.width): for j in range(self.length): result[i, j] = self[i, j] + other[i, j] return result
Add addition method to Terrain
Add addition method to Terrain
Python
mit
jackromo/RandTerrainPy
python
## Code Before: """This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value ## Instruction: Add addition method to Terrain ## Code After: """This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value def __add__(self, other): """Add two terrains, height by height. Args: other (Terrain): Other terrain to add self to. Must have same dimensions as self. Returns: Terrain: Terrain of self and other added together. """ result = Terrain(self.width, self.length) for i in range(self.width): for j in range(self.length): result[i, j] = self[i, j] + other[i, j] return result
// ... existing code ... """ self.height_map[key[1]][key[0]] = value def __add__(self, other): """Add two terrains, height by height. Args: other (Terrain): Other terrain to add self to. Must have same dimensions as self. Returns: Terrain: Terrain of self and other added together. """ result = Terrain(self.width, self.length) for i in range(self.width): for j in range(self.length): result[i, j] = self[i, j] + other[i, j] return result // ... rest of the code ...
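The pattern introduced above, element-wise addition of two same-sized height grids, can be sketched on its own with plain nested lists. This is an illustrative stand-in, not code from randterrainpy; the helper name and sample values are invented.

```python
# Illustrative sketch (plain lists, not the Terrain class itself): the same
# element-wise addition over two equally sized 2-D grids that __add__ performs.
def add_grids(a, b):
    if len(a) != len(b) or any(len(ra) != len(rb) for ra, rb in zip(a, b)):
        raise ValueError("grids must have identical dimensions")
    return [[x + y for x, y in zip(ra, rb)] for ra, rb in zip(a, b)]

left = [[0.5, 0.25], [0.0, 1.0]]
right = [[0.25, 0.25], [0.5, 0.0]]
print(add_grids(left, right))  # [[0.75, 0.5], [0.5, 1.0]]
```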
8a5f1fe18aad2f5078484ef36dffb33c920506ea
clients/java/src/main/java/io/aigar/game/models/GameState.java
clients/java/src/main/java/io/aigar/game/models/GameState.java
package io.aigar.game.models; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.List; public class GameState { private int id; private int tick; private boolean paused; private int multiplier; private float timeLeft; private List<Player> players; private Resources resources; private Size map; private List<Virus> viruses; @JsonIgnore private Player me; @JsonIgnore private List<Player> enemies; public int getId() { return id; } public int getTick() { return tick; } public boolean isPaused() { return paused; } public int getMultiplier() { return multiplier; } public float getTimeLeft() { return timeLeft; } public List<Player> getPlayers() { return players; } public Resources getResources() { return resources; } public Size getMap() { return map; } public List<Virus> getViruses() { return viruses; } public Player getMe() { return me; } public void setMe(Player me) { this.me = me; } public List<Player> getEnemies() { return enemies; } public void setEnemies(List<Player> enemies) { this.enemies = enemies; } }
package io.aigar.game.models; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.List; public class GameState { private int id; private int tick; private boolean paused; private boolean disabledLeaderboard; private int multiplier; private float timeLeft; private List<Player> players; private Resources resources; private Size map; private List<Virus> viruses; @JsonIgnore private Player me; @JsonIgnore private List<Player> enemies; public int getId() { return id; } public int getTick() { return tick; } public boolean isPaused() { return paused; } public boolean isDisabledLeaderboard() { return disabledLeaderboard; } public int getMultiplier() { return multiplier; } public float getTimeLeft() { return timeLeft; } public List<Player> getPlayers() { return players; } public Resources getResources() { return resources; } public Size getMap() { return map; } public List<Virus> getViruses() { return viruses; } public Player getMe() { return me; } public void setMe(Player me) { this.me = me; } public List<Player> getEnemies() { return enemies; } public void setEnemies(List<Player> enemies) { this.enemies = enemies; } }
Fix java client to accept new parameters
Fix java client to accept new parameters
Java
mit
DrPandemic/aigar.io,DrPandemic/aigar.io,DrPandemic/aigar.io,DrPandemic/aigar.io,DrPandemic/aigar.io,DrPandemic/aigar.io
java
## Code Before: package io.aigar.game.models; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.List; public class GameState { private int id; private int tick; private boolean paused; private int multiplier; private float timeLeft; private List<Player> players; private Resources resources; private Size map; private List<Virus> viruses; @JsonIgnore private Player me; @JsonIgnore private List<Player> enemies; public int getId() { return id; } public int getTick() { return tick; } public boolean isPaused() { return paused; } public int getMultiplier() { return multiplier; } public float getTimeLeft() { return timeLeft; } public List<Player> getPlayers() { return players; } public Resources getResources() { return resources; } public Size getMap() { return map; } public List<Virus> getViruses() { return viruses; } public Player getMe() { return me; } public void setMe(Player me) { this.me = me; } public List<Player> getEnemies() { return enemies; } public void setEnemies(List<Player> enemies) { this.enemies = enemies; } } ## Instruction: Fix java client to accept new parameters ## Code After: package io.aigar.game.models; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.List; public class GameState { private int id; private int tick; private boolean paused; private boolean disabledLeaderboard; private int multiplier; private float timeLeft; private List<Player> players; private Resources resources; private Size map; private List<Virus> viruses; @JsonIgnore private Player me; @JsonIgnore private List<Player> enemies; public int getId() { return id; } public int getTick() { return tick; } public boolean isPaused() { return paused; } public boolean isDisabledLeaderboard() { return disabledLeaderboard; } public int getMultiplier() { return multiplier; } public float getTimeLeft() { return timeLeft; } public List<Player> getPlayers() { return players; } public Resources getResources() { return resources; } public Size getMap() { return map; } public List<Virus> getViruses() { return viruses; } public Player getMe() { return me; } public void setMe(Player me) { this.me = me; } public List<Player> getEnemies() { return enemies; } public void setEnemies(List<Player> enemies) { this.enemies = enemies; } }
... private int id; private int tick; private boolean paused; private boolean disabledLeaderboard; private int multiplier; private float timeLeft; private List<Player> players; ... public boolean isPaused() { return paused; } public boolean isDisabledLeaderboard() { return disabledLeaderboard; } public int getMultiplier() { return multiplier; ...
5f44348e2126e13d50cf4f9f3195ae4979ab63c9
include/ethernet.h
include/ethernet.h
struct eth_hdr { unsigned char dst_mac[6]; unsigned char src_mac[6]; short ethertype; char* payload; }; struct eth_hdr* init_eth_hdr(char* buf); void print_eth_hdr(struct eth_hdr *ehdr);
struct eth_hdr { unsigned char dst_mac[6]; unsigned char src_mac[6]; unsigned short ethertype; unsigned char* payload; }; struct eth_hdr* init_eth_hdr(char* buf); void print_eth_hdr(struct eth_hdr *ehdr); #endif
Make eth header fields unsigned
Make eth header fields unsigned
C
mit
saminiir/level-ip,saminiir/level-ip
c
## Code Before: struct eth_hdr { unsigned char dst_mac[6]; unsigned char src_mac[6]; short ethertype; char* payload; }; struct eth_hdr* init_eth_hdr(char* buf); void print_eth_hdr(struct eth_hdr *ehdr); ## Instruction: Make eth header fields unsigned ## Code After: struct eth_hdr { unsigned char dst_mac[6]; unsigned char src_mac[6]; unsigned short ethertype; unsigned char* payload; }; struct eth_hdr* init_eth_hdr(char* buf); void print_eth_hdr(struct eth_hdr *ehdr); #endif
... struct eth_hdr { unsigned char dst_mac[6]; unsigned char src_mac[6]; unsigned short ethertype; unsigned char* payload; }; struct eth_hdr* init_eth_hdr(char* buf); void print_eth_hdr(struct eth_hdr *ehdr); #endif ...
1c0ea1a102ed91342ce0d609733426b8a07cd67d
easy_thumbnails/tests/apps.py
easy_thumbnails/tests/apps.py
from django.apps import AppConfig class EasyThumbnailsTestConfig(AppConfig): name = 'easy_thumbnails.tests' label = 'easy_thumbnails_tests'
try: from django.apps import AppConfig except ImportError: # Early Django versions import everything in test, avoid the failure due to # AppConfig only existing in 1.7+ AppConfig = object class EasyThumbnailsTestConfig(AppConfig): name = 'easy_thumbnails.tests' label = 'easy_thumbnails_tests'
Fix an import error for old django versions
Fix an import error for old django versions Fixes #371
Python
bsd-3-clause
SmileyChris/easy-thumbnails
python
## Code Before: from django.apps import AppConfig class EasyThumbnailsTestConfig(AppConfig): name = 'easy_thumbnails.tests' label = 'easy_thumbnails_tests' ## Instruction: Fix an import error for old django versions Fixes #371 ## Code After: try: from django.apps import AppConfig except ImportError: # Early Django versions import everything in test, avoid the failure due to # AppConfig only existing in 1.7+ AppConfig = object class EasyThumbnailsTestConfig(AppConfig): name = 'easy_thumbnails.tests' label = 'easy_thumbnails_tests'
# ... existing code ... try: from django.apps import AppConfig except ImportError: # Early Django versions import everything in test, avoid the failure due to # AppConfig only existing in 1.7+ AppConfig = object class EasyThumbnailsTestConfig(AppConfig): # ... rest of the code ...
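The guard above is an instance of the general ImportError-fallback idiom. A self-contained sketch of the same idiom, using an optional third-party JSON module as the stand-in rather than Django, looks like this:

```python
# Generic sketch of the same guard (illustrative, not tied to Django):
# prefer an optional or newer module, fall back to a stand-in when it is missing.
try:
    import simplejson as json  # optional third-party package, may be absent
except ImportError:
    import json                # stdlib fallback that is always available

print(json.dumps({"fallback": "works either way"}))
```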
7f396cfd88fd08466db2f9cd77bf40e91345d2a2
nodeconductor/core/authentication.py
nodeconductor/core/authentication.py
from __future__ import unicode_literals import nodeconductor.logging.middleware import rest_framework.authentication def user_capturing_auth(auth): class CapturingAuthentication(auth): def authenticate(self, request): result = super(CapturingAuthentication, self).authenticate(request) if result is not None: user, _ = result nodeconductor.logging.middleware.set_current_user(user) return result return CapturingAuthentication SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication) TokenAuthentication = user_capturing_auth(rest_framework.authentication.TokenAuthentication)
from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ import rest_framework.authentication from rest_framework import exceptions import nodeconductor.logging.middleware TOKEN_KEY = 'x-auth-token' class TokenAuthentication(rest_framework.authentication.TokenAuthentication): """ Custom token-based authentication. Use TOKEN_KEY from request query parameters if authentication token was not found in header. """ def get_authorization_value(self, request): auth = rest_framework.authentication.get_authorization_header(request) if not auth: auth = request.query_params.get(TOKEN_KEY, '') return auth def authenticate(self, request): auth = self.get_authorization_value(request).split() if not auth or auth[0].lower() != b'token': return None if len(auth) == 1: msg = _('Invalid token. No credentials provided.') raise exceptions.AuthenticationFailed(msg) elif len(auth) > 2: msg = _('Invalid token. Token string should not contain spaces.') raise exceptions.AuthenticationFailed(msg) return self.authenticate_credentials(auth[1]) def user_capturing_auth(auth): class CapturingAuthentication(auth): def authenticate(self, request): result = super(CapturingAuthentication, self).authenticate(request) if result is not None: user, _ = result nodeconductor.logging.middleware.set_current_user(user) return result return CapturingAuthentication SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication) TokenAuthentication = user_capturing_auth(TokenAuthentication)
Use get parameter in token auth (nc-544)
Use get parameter in token auth (nc-544)
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
python
## Code Before: from __future__ import unicode_literals import nodeconductor.logging.middleware import rest_framework.authentication def user_capturing_auth(auth): class CapturingAuthentication(auth): def authenticate(self, request): result = super(CapturingAuthentication, self).authenticate(request) if result is not None: user, _ = result nodeconductor.logging.middleware.set_current_user(user) return result return CapturingAuthentication SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication) TokenAuthentication = user_capturing_auth(rest_framework.authentication.TokenAuthentication) ## Instruction: Use get parameter in token auth (nc-544) ## Code After: from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ import rest_framework.authentication from rest_framework import exceptions import nodeconductor.logging.middleware TOKEN_KEY = 'x-auth-token' class TokenAuthentication(rest_framework.authentication.TokenAuthentication): """ Custom token-based authentication. Use TOKEN_KEY from request query parameters if authentication token was not found in header. """ def get_authorization_value(self, request): auth = rest_framework.authentication.get_authorization_header(request) if not auth: auth = request.query_params.get(TOKEN_KEY, '') return auth def authenticate(self, request): auth = self.get_authorization_value(request).split() if not auth or auth[0].lower() != b'token': return None if len(auth) == 1: msg = _('Invalid token. No credentials provided.') raise exceptions.AuthenticationFailed(msg) elif len(auth) > 2: msg = _('Invalid token. Token string should not contain spaces.') raise exceptions.AuthenticationFailed(msg) return self.authenticate_credentials(auth[1]) def user_capturing_auth(auth): class CapturingAuthentication(auth): def authenticate(self, request): result = super(CapturingAuthentication, self).authenticate(request) if result is not None: user, _ = result nodeconductor.logging.middleware.set_current_user(user) return result return CapturingAuthentication SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication) TokenAuthentication = user_capturing_auth(TokenAuthentication)
# ... existing code ... from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ import rest_framework.authentication from rest_framework import exceptions import nodeconductor.logging.middleware TOKEN_KEY = 'x-auth-token' class TokenAuthentication(rest_framework.authentication.TokenAuthentication): """ Custom token-based authentication. Use TOKEN_KEY from request query parameters if authentication token was not found in header. """ def get_authorization_value(self, request): auth = rest_framework.authentication.get_authorization_header(request) if not auth: auth = request.query_params.get(TOKEN_KEY, '') return auth def authenticate(self, request): auth = self.get_authorization_value(request).split() if not auth or auth[0].lower() != b'token': return None if len(auth) == 1: msg = _('Invalid token. No credentials provided.') raise exceptions.AuthenticationFailed(msg) elif len(auth) > 2: msg = _('Invalid token. Token string should not contain spaces.') raise exceptions.AuthenticationFailed(msg) return self.authenticate_credentials(auth[1]) def user_capturing_auth(auth): # ... modified code ... return CapturingAuthentication SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication) TokenAuthentication = user_capturing_auth(TokenAuthentication) # ... rest of the code ...
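Outside of Django REST framework, the same lookup order (Authorization header first, then an x-auth-token query parameter) can be sketched with only the standard library. The helper below is hypothetical and simplified; it is not the DRF API used in the record.

```python
# Framework-free sketch of the fallback order used above (hypothetical helper,
# not the REST framework API): read the auth value from a header if present,
# otherwise from an ?x-auth-token= query parameter.
from urllib.parse import urlparse, parse_qs

TOKEN_KEY = 'x-auth-token'

def extract_token(headers, url):
    auth = headers.get('Authorization', '')
    if not auth:
        auth = parse_qs(urlparse(url).query).get(TOKEN_KEY, [''])[0]
    parts = auth.split()
    if len(parts) == 2 and parts[0].lower() == 'token':
        return parts[1]
    return None  # missing or malformed credentials

print(extract_token({}, 'https://api.example.com/items?x-auth-token=Token%20abc123'))  # abc123
```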
eaa2b2534e5d7115be1d5d9efcfe1ce28e0b0721
fast-xattr-test/fast-xattr-test/main.c
fast-xattr-test/fast-xattr-test/main.c
// // main.c // fast-xattr-test // // Created by David Schlachter on 2015-07-09. // Copyright (c) 2015 David Schlachter. All rights reserved. // #include <stdio.h> int main(int argc, const char * argv[]) { // insert code here... printf("Hello, World!\n"); return 0; }
// // main.c // fast-xattr-test // // Created by David Schlachter on 2015-07-09. // Copyright (c) 2015 David Schlachter. All rights reserved. // #include <stdio.h> #include <sys/xattr.h> #include <stdlib.h> int main(int argc, const char * argv[]) { const char *path; const char *name; void *value = malloc(15); size_t size; u_int32_t position; int options = 0; path = argv[1]; name = argv[2]; size = 14; position = 0; if (!getxattr(path, name, value, size, position, options)) { return 0; } else { return 1; }; }
Set up the getxattr function
Set up the getxattr function
C
mit
davidschlachter/fast-xattr-test
c
## Code Before: // // main.c // fast-xattr-test // // Created by David Schlachter on 2015-07-09. // Copyright (c) 2015 David Schlachter. All rights reserved. // #include <stdio.h> int main(int argc, const char * argv[]) { // insert code here... printf("Hello, World!\n"); return 0; } ## Instruction: Set up the getxattr function ## Code After: // // main.c // fast-xattr-test // // Created by David Schlachter on 2015-07-09. // Copyright (c) 2015 David Schlachter. All rights reserved. // #include <stdio.h> #include <sys/xattr.h> #include <stdlib.h> int main(int argc, const char * argv[]) { const char *path; const char *name; void *value = malloc(15); size_t size; u_int32_t position; int options = 0; path = argv[1]; name = argv[2]; size = 14; position = 0; if (!getxattr(path, name, value, size, position, options)) { return 0; } else { return 1; }; }
... // #include <stdio.h> #include <sys/xattr.h> #include <stdlib.h> int main(int argc, const char * argv[]) { const char *path; const char *name; void *value = malloc(15); size_t size; u_int32_t position; int options = 0; path = argv[1]; name = argv[2]; size = 14; position = 0; if (!getxattr(path, name, value, size, position, options)) { return 0; } else { return 1; }; } ...
fc6b3df720ac05b715ae6478367f79e834c47c26
pi_broadcast_service/rabbit.py
pi_broadcast_service/rabbit.py
import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close()
import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() self._channel.exchange_declare(exchange=self._exchange, type='direct') def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close()
Make sure the exchange is there first
Make sure the exchange is there first
Python
mit
projectweekend/Pi-Broadcast-Service
python
## Code Before: import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close() ## Instruction: Make sure the exchange is there first ## Code After: import json import pika class Publisher(object): def __init__(self, rabbit_url, exchange): self._rabbit_url = rabbit_url self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() self._channel.exchange_declare(exchange=self._exchange, type='direct') def send(self, routing_key, message): self._channel.basic_publish( exchange=self._exchange, routing_key=routing_key, body=json.dumps(message)) def stop(self): self._connection.close()
// ... existing code ... self._exchange = exchange self._connection = pika.BlockingConnection(pika.URLParameters(self._rabbit_url)) self._channel = self._connection.channel() self._channel.exchange_declare(exchange=self._exchange, type='direct') def send(self, routing_key, message): self._channel.basic_publish( // ... rest of the code ...
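For context, a minimal pika publisher that declares its exchange before publishing might look like the sketch below. It assumes a RabbitMQ broker is reachable at the given URL, and note that pika 1.x names the keyword exchange_type, whereas the snippet above uses the older type= spelling.

```python
# Minimal pika sketch of "declare before publish" (assumes a RabbitMQ broker is
# reachable at the URL below; exchange_declare is idempotent when repeated with
# the same arguments, which is why calling it at startup is safe).
import json
import pika

connection = pika.BlockingConnection(
    pika.URLParameters('amqp://guest:guest@localhost:5672/%2F'))
channel = connection.channel()
channel.exchange_declare(exchange='pi_broadcast', exchange_type='direct')
channel.basic_publish(exchange='pi_broadcast',
                      routing_key='sensors.temperature',
                      body=json.dumps({'celsius': 21.5}))
connection.close()
```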
e4e4e8d5c3acf5851d33700f8b55aa2e1f9c33f2
server/app/migrations/0003_region.py
server/app/migrations/0003_region.py
import os import json from django.db import migrations from django.conf import settings def dfs(apps, root, deep, superset=None, leaf=True): Region = apps.get_model('app', 'Region') if isinstance(root, dict): for k, v in root.items(): s = dfs(apps, k, deep, superset, not v) dfs(apps, v, deep + 1, s) elif isinstance(root, list): for k in root: dfs(apps, k, deep, superset, True) else: region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf) region.save() #print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name)) return region def add_region(apps, schema_editor): if settings.TESTING: data_file = "regions_for_test.json" else: data_file = "regions.txt" regions = json.load(open(os.path.join(os.path.dirname(__file__), data_file))) #print("添加省份") dfs(apps, regions, 1) class Migration(migrations.Migration): dependencies = [ ('app', '0002_subject'), ] operations = [ migrations.RunPython(add_region), ]
import os import json from collections import OrderedDict from django.db import migrations from django.conf import settings def dfs(apps, root, deep, superset=None, leaf=True): Region = apps.get_model('app', 'Region') if isinstance(root, dict): for k, v in root.items(): s = dfs(apps, k, deep, superset, not v) dfs(apps, v, deep + 1, s) elif isinstance(root, list): for k in root: dfs(apps, k, deep, superset, True) else: region = Region( name=root, superset=superset, admin_level=deep, leaf=leaf) region.save() return region def add_region(apps, schema_editor): if settings.TESTING: data_file = "regions_for_test.json" else: data_file = "regions.txt" regions = json.load(open( os.path.join(os.path.dirname(__file__), data_file)), object_pairs_hook=OrderedDict) dfs(apps, regions, 1) class Migration(migrations.Migration): dependencies = [ ('app', '0002_subject'), ] operations = [ migrations.RunPython(add_region), ]
Make ID of regions be definite.
SERVER-242: Make ID of regions be definite.
Python
mit
malaonline/Server,malaonline/iOS,malaonline/Android,malaonline/Android,malaonline/iOS,malaonline/Android,malaonline/Server,malaonline/Server,malaonline/iOS,malaonline/Server
python
## Code Before: import os import json from django.db import migrations from django.conf import settings def dfs(apps, root, deep, superset=None, leaf=True): Region = apps.get_model('app', 'Region') if isinstance(root, dict): for k, v in root.items(): s = dfs(apps, k, deep, superset, not v) dfs(apps, v, deep + 1, s) elif isinstance(root, list): for k in root: dfs(apps, k, deep, superset, True) else: region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf) region.save() #print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name)) return region def add_region(apps, schema_editor): if settings.TESTING: data_file = "regions_for_test.json" else: data_file = "regions.txt" regions = json.load(open(os.path.join(os.path.dirname(__file__), data_file))) #print("添加省份") dfs(apps, regions, 1) class Migration(migrations.Migration): dependencies = [ ('app', '0002_subject'), ] operations = [ migrations.RunPython(add_region), ] ## Instruction: SERVER-242: Make ID of regions be definite. ## Code After: import os import json from collections import OrderedDict from django.db import migrations from django.conf import settings def dfs(apps, root, deep, superset=None, leaf=True): Region = apps.get_model('app', 'Region') if isinstance(root, dict): for k, v in root.items(): s = dfs(apps, k, deep, superset, not v) dfs(apps, v, deep + 1, s) elif isinstance(root, list): for k in root: dfs(apps, k, deep, superset, True) else: region = Region( name=root, superset=superset, admin_level=deep, leaf=leaf) region.save() return region def add_region(apps, schema_editor): if settings.TESTING: data_file = "regions_for_test.json" else: data_file = "regions.txt" regions = json.load(open( os.path.join(os.path.dirname(__file__), data_file)), object_pairs_hook=OrderedDict) dfs(apps, regions, 1) class Migration(migrations.Migration): dependencies = [ ('app', '0002_subject'), ] operations = [ migrations.RunPython(add_region), ]
# ... existing code ... import os import json from collections import OrderedDict from django.db import migrations from django.conf import settings # ... modified code ... for k in root: dfs(apps, k, deep, superset, True) else: region = Region( name=root, superset=superset, admin_level=deep, leaf=leaf) region.save() return region ... data_file = "regions_for_test.json" else: data_file = "regions.txt" regions = json.load(open( os.path.join(os.path.dirname(__file__), data_file)), object_pairs_hook=OrderedDict) dfs(apps, regions, 1) # ... rest of the code ...
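The ordering guarantee the migration relies on can be demonstrated with the standard library alone; the sample JSON below is invented and much smaller than regions.txt.

```python
# Standard-library demonstration of the ordering guarantee relied on above:
# object_pairs_hook=OrderedDict preserves the key order of the JSON document,
# so rows are created in a fixed, repeatable sequence. (On Python 3.7+ plain
# dicts also keep insertion order, but the hook makes the intent explicit.)
import json
from collections import OrderedDict

raw = '{"Beijing": ["Haidian", "Chaoyang"], "Shanghai": ["Pudong"]}'
regions = json.loads(raw, object_pairs_hook=OrderedDict)
for province, subsets in regions.items():
    print(province, subsets)
# Beijing ['Haidian', 'Chaoyang']
# Shanghai ['Pudong']
```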
86d4aa3e5895d5f7ac029df82c63e2b1e29e8c2d
spc/types.py
spc/types.py
from collections import namedtuple IntegerType = namedtuple('IntegerType', []) Integer = IntegerType() ByteType = namedtuple('ByteType', []) Byte = ByteType() PointerTo = namedtuple('PointerTo', ['type']) ArrayOf = namedtuple('ArrayOf', ['type', 'count']) FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params']) TypeName = namedtuple('TypeName', ['name']) # Structure is a bit of an oddity - it can't actually be used in 'raw form' # by the user, but is always aliased in a declare block Struct = namedtuple('Struct', ['fields']) # This is used merely to record that a function has been declared - the # actual reified type is FunctionPointer FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params']) AliasDef = namedtuple('AliasDef', ['type']) # Raw types are types which can be used as variables RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName, types.PointerTo, types.ArrayOf, types.FunctionPointer) def decay_if_array(type_obj): """ Decays arrays types into pointers. """ if isinstance(type_obj, types.ArrayOf): return type_obj.PointerTo(type_obj.type) else: return type_obj def func_decl_to_ptr(func_decl): """ Converts a function declaration to a pointer. """ return FunctionPointer(*func_decl)
from collections import namedtuple IntegerType = namedtuple('IntegerType', []) Integer = IntegerType() ByteType = namedtuple('ByteType', []) Byte = ByteType() PointerTo = namedtuple('PointerTo', ['type']) ArrayOf = namedtuple('ArrayOf', ['type', 'count']) FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params']) TypeName = namedtuple('TypeName', ['name']) # Structure is a bit of an oddity - it can't actually be used in 'raw form' # by the user, but is always aliased in a declare block. # # Also, fields is an OrderedDict, since the order of fields matters for layout, Struct = namedtuple('Struct', ['fields']) # This is used merely to record that a function has been declared - the # actual reified type is FunctionPointer FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params']) AliasDef = namedtuple('AliasDef', ['type']) # Raw types are types which can be used as variables RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName, types.PointerTo, types.ArrayOf, types.FunctionPointer) def decay_if_array(type_obj): """ Decays arrays types into pointers. """ if isinstance(type_obj, types.ArrayOf): return type_obj.PointerTo(type_obj.type) else: return type_obj def func_decl_to_ptr(func_decl): """ Converts a function declaration to a pointer. """ return FunctionPointer(*func_decl)
Add note that Struct's field collection is an OrderedDict
Add note that Struct's field collection is an OrderedDict
Python
mit
adamnew123456/spc,adamnew123456/spc
python
## Code Before: from collections import namedtuple IntegerType = namedtuple('IntegerType', []) Integer = IntegerType() ByteType = namedtuple('ByteType', []) Byte = ByteType() PointerTo = namedtuple('PointerTo', ['type']) ArrayOf = namedtuple('ArrayOf', ['type', 'count']) FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params']) TypeName = namedtuple('TypeName', ['name']) # Structure is a bit of an oddity - it can't actually be used in 'raw form' # by the user, but is always aliased in a declare block Struct = namedtuple('Struct', ['fields']) # This is used merely to record that a function has been declared - the # actual reified type is FunctionPointer FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params']) AliasDef = namedtuple('AliasDef', ['type']) # Raw types are types which can be used as variables RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName, types.PointerTo, types.ArrayOf, types.FunctionPointer) def decay_if_array(type_obj): """ Decays arrays types into pointers. """ if isinstance(type_obj, types.ArrayOf): return type_obj.PointerTo(type_obj.type) else: return type_obj def func_decl_to_ptr(func_decl): """ Converts a function declaration to a pointer. """ return FunctionPointer(*func_decl) ## Instruction: Add note that Struct's field collection is an OrderedDict ## Code After: from collections import namedtuple IntegerType = namedtuple('IntegerType', []) Integer = IntegerType() ByteType = namedtuple('ByteType', []) Byte = ByteType() PointerTo = namedtuple('PointerTo', ['type']) ArrayOf = namedtuple('ArrayOf', ['type', 'count']) FunctionPointer = namedtuple('FunctionPointer', ['return_type', 'params']) TypeName = namedtuple('TypeName', ['name']) # Structure is a bit of an oddity - it can't actually be used in 'raw form' # by the user, but is always aliased in a declare block. # # Also, fields is an OrderedDict, since the order of fields matters for layout, Struct = namedtuple('Struct', ['fields']) # This is used merely to record that a function has been declared - the # actual reified type is FunctionPointer FunctionDecl = namedtuple('FunctionDecl', ['return_type', 'params']) AliasDef = namedtuple('AliasDef', ['type']) # Raw types are types which can be used as variables RAW_TYPES = (types.IntegerType, types.ByteType, types.TypeName, types.PointerTo, types.ArrayOf, types.FunctionPointer) def decay_if_array(type_obj): """ Decays arrays types into pointers. """ if isinstance(type_obj, types.ArrayOf): return type_obj.PointerTo(type_obj.type) else: return type_obj def func_decl_to_ptr(func_decl): """ Converts a function declaration to a pointer. """ return FunctionPointer(*func_decl)
// ... existing code ... TypeName = namedtuple('TypeName', ['name']) # Structure is a bit of an oddity - it can't actually be used in 'raw form' # by the user, but is always aliased in a declare block. # # Also, fields is an OrderedDict, since the order of fields matters for layout, Struct = namedtuple('Struct', ['fields']) # This is used merely to record that a function has been declared - the // ... rest of the code ...
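A small, hypothetical sketch of why an ordered field mapping matters for layout: offsets are assigned by walking fields in declaration order, so the container has to preserve that order. The sizes and field names below are made up for illustration and are not spc's real layout code.

```python
# Illustrative sketch (not the compiler's real code): offsets are computed by
# walking the fields in declaration order, so the mapping must keep that order.
from collections import OrderedDict

def field_offsets(fields, size_of):
    offsets, offset = OrderedDict(), 0
    for name, field_type in fields.items():
        offsets[name] = offset
        offset += size_of[field_type]
    return offsets, offset  # per-field offsets and total struct size

sizes = {'integer': 4, 'byte': 1}          # made-up sizes for the example
point = OrderedDict([('x', 'integer'), ('y', 'integer'), ('flag', 'byte')])
offsets, total = field_offsets(point, sizes)
print(dict(offsets), total)                # {'x': 0, 'y': 4, 'flag': 8} 9
```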
de731520f9ad3f871a976fd597ff1a4d8acf155f
tests/modules/test_enumerable.py
tests/modules/test_enumerable.py
class TestEnumberable(object): def test_inject(self, ec): w_res = ec.space.execute(ec, """ return (5..10).inject(1) do |prod, n| prod * n end """) assert ec.space.int_w(w_res) == 15120 w_res = ec.space.execute(ec, """ return (1..10).inject 0 do |sum, n| sum + n end """) assert ec.space.int_w(w_res) == 45 def test_each_with_index(self, ec): w_res = ec.space.execute(ec, """ result = [] (5..10).each_with_index do |n, idx| result << [n, idx] end return result """) assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]] def test_all(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 3 end """) assert ec.space.w_true
class TestEnumberable(object): def test_inject(self, ec): w_res = ec.space.execute(ec, """ return (5..10).inject(1) do |prod, n| prod * n end """) assert ec.space.int_w(w_res) == 15120 w_res = ec.space.execute(ec, """ return (1..10).inject 0 do |sum, n| sum + n end """) assert ec.space.int_w(w_res) == 45 def test_each_with_index(self, ec): w_res = ec.space.execute(ec, """ result = [] (5..10).each_with_index do |n, idx| result << [n, idx] end return result """) assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]] def test_all(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 3 end """) assert w_res is ec.space.w_true def test_all_false(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 4 end """) assert w_res is ec.space.w_false
Fix true test, add false test
Fix true test, add false test
Python
bsd-3-clause
babelsberg/babelsberg-r,topazproject/topaz,topazproject/topaz,babelsberg/babelsberg-r,babelsberg/babelsberg-r,babelsberg/babelsberg-r,babelsberg/babelsberg-r,topazproject/topaz,kachick/topaz,kachick/topaz,topazproject/topaz,kachick/topaz
python
## Code Before: class TestEnumberable(object): def test_inject(self, ec): w_res = ec.space.execute(ec, """ return (5..10).inject(1) do |prod, n| prod * n end """) assert ec.space.int_w(w_res) == 15120 w_res = ec.space.execute(ec, """ return (1..10).inject 0 do |sum, n| sum + n end """) assert ec.space.int_w(w_res) == 45 def test_each_with_index(self, ec): w_res = ec.space.execute(ec, """ result = [] (5..10).each_with_index do |n, idx| result << [n, idx] end return result """) assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]] def test_all(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 3 end """) assert ec.space.w_true ## Instruction: Fix true test, add false test ## Code After: class TestEnumberable(object): def test_inject(self, ec): w_res = ec.space.execute(ec, """ return (5..10).inject(1) do |prod, n| prod * n end """) assert ec.space.int_w(w_res) == 15120 w_res = ec.space.execute(ec, """ return (1..10).inject 0 do |sum, n| sum + n end """) assert ec.space.int_w(w_res) == 45 def test_each_with_index(self, ec): w_res = ec.space.execute(ec, """ result = [] (5..10).each_with_index do |n, idx| result << [n, idx] end return result """) assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]] def test_all(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 3 end """) assert w_res is ec.space.w_true def test_all_false(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 4 end """) assert w_res is ec.space.w_false
# ... existing code ... word.length >= 3 end """) assert w_res is ec.space.w_true def test_all_false(self, ec): w_res = ec.space.execute(ec, """ return ["ant", "bear", "cat"].all? do |word| word.length >= 4 end """) assert w_res is ec.space.w_false # ... rest of the code ...
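The test bug fixed above (asserting a truthy sentinel instead of comparing the result to it) is easy to reproduce with stand-in objects; the classes below are dummies, not the real interpreter types.

```python
# Stand-in sketch of the assertion pitfall fixed above: asserting a sentinel
# object is truthy always passes, so the result has to be compared against the
# sentinel explicitly with an identity check.
class Sentinel:
    def __init__(self, name):
        self.name = name

w_true, w_false = Sentinel('true'), Sentinel('false')

result = w_false             # pretend the interpreter returned its false object
assert w_true                # always passes: only checks that w_true is truthy
assert result is not w_true  # the meaningful check compares identities
assert result is w_false
print("identity checks behave as expected")
```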
0bbf3a15770716f899dab2ae982b98a482e4dd50
examples/intro_android_demo/Build.kt
examples/intro_android_demo/Build.kt
import com.beust.kobalt.* import com.beust.kobalt.plugin.android.* import com.beust.kobalt.plugin.java.* val r = repos(file("d:\\android\\adt-bundle-windows-x86_64-20140321\\sdk\\extras\\android\\m2repository")) val p = javaProject { name = "intro_android_demo" group = "com.example" artifactId = name version = "0.1" directory = homeDir("android/intro_android_demo") sourceDirectories { listOf(path("app/src/main/java")) } dependencies { compile(file("app/libs/android-async-http-1.4.3.jar"), "com.android.support:support-v4:aar:23.0.1") } android { applicationId = name buildToolsVersion = "21.1.3" } }
import com.beust.kobalt.* import com.beust.kobalt.plugin.android.* import com.beust.kobalt.plugin.java.* val r = repos(file("d:\\android\\adt-bundle-windows-x86_64-20140321\\sdk\\extras\\android\\m2repository")) val p = javaProject { name = "intro_android_demo" group = "com.example" artifactId = name version = "0.1" directory = homeDir("android/intro_android_demo") sourceDirectories { listOf(path("app/src/main/java")) } dependencies { compile(file("app/libs/android-async-http-1.4.3.jar"), "com.android.support:support-v4:aar:21.0.3") } android { compileSdkVersion = "21" applicationId = "codepath.apps.demointroandroid" buildToolsVersion = "21.1.2" } }
Fix the build file for the examples.
Fix the build file for the examples.
Kotlin
apache-2.0
cbeust/kobalt,ethauvin/kobalt,cbeust/kobalt,evanchooly/kobalt,ethauvin/kobalt,ethauvin/kobalt,cbeust/kobalt,evanchooly/kobalt,evanchooly/kobalt,evanchooly/kobalt,ethauvin/kobalt
kotlin
## Code Before: import com.beust.kobalt.* import com.beust.kobalt.plugin.android.* import com.beust.kobalt.plugin.java.* val r = repos(file("d:\\android\\adt-bundle-windows-x86_64-20140321\\sdk\\extras\\android\\m2repository")) val p = javaProject { name = "intro_android_demo" group = "com.example" artifactId = name version = "0.1" directory = homeDir("android/intro_android_demo") sourceDirectories { listOf(path("app/src/main/java")) } dependencies { compile(file("app/libs/android-async-http-1.4.3.jar"), "com.android.support:support-v4:aar:23.0.1") } android { applicationId = name buildToolsVersion = "21.1.3" } } ## Instruction: Fix the build file for the examples. ## Code After: import com.beust.kobalt.* import com.beust.kobalt.plugin.android.* import com.beust.kobalt.plugin.java.* val r = repos(file("d:\\android\\adt-bundle-windows-x86_64-20140321\\sdk\\extras\\android\\m2repository")) val p = javaProject { name = "intro_android_demo" group = "com.example" artifactId = name version = "0.1" directory = homeDir("android/intro_android_demo") sourceDirectories { listOf(path("app/src/main/java")) } dependencies { compile(file("app/libs/android-async-http-1.4.3.jar"), "com.android.support:support-v4:aar:21.0.3") } android { compileSdkVersion = "21" applicationId = "codepath.apps.demointroandroid" buildToolsVersion = "21.1.2" } }
... dependencies { compile(file("app/libs/android-async-http-1.4.3.jar"), "com.android.support:support-v4:aar:21.0.3") } android { compileSdkVersion = "21" applicationId = "codepath.apps.demointroandroid" buildToolsVersion = "21.1.2" } } ...
394ed06411d3ca3ada66aab3bee796682895acc0
cla_backend/apps/core/testing.py
cla_backend/apps/core/testing.py
from django.core.management import call_command from django.test.utils import get_runner from django.conf import settings from django.db import connections, DEFAULT_DB_ALIAS # use jenkins runner if present otherwise the default django one if 'django_jenkins' in settings.INSTALLED_APPS: base_runner = 'django_jenkins.runner.CITestSuiteRunner' else: base_runner = 'django.test.runner.DiscoverRunner' class CLADiscoverRunner(get_runner(settings, base_runner)): """ Overrides the default Runner and loads the initial_groups fixture. This is because migrations are switched off during testing but we do need `initial_groups` in order for the tests to pass. """ def setup_databases(self, **kwargs): ret = super(CLADiscoverRunner, self).setup_databases(**kwargs) connection = connections[DEFAULT_DB_ALIAS] cursor = connection.cursor() cursor.execute('CREATE EXTENSION pgcrypto') call_command('loaddata', 'initial_groups') return ret
from django.core.management import call_command from django.test.utils import get_runner from django.conf import settings from django.db import connections, DEFAULT_DB_ALIAS # use jenkins runner if present otherwise the default django one if 'django_jenkins' in settings.INSTALLED_APPS: base_runner = 'django_jenkins.runner.CITestSuiteRunner' else: base_runner = 'django.test.runner.DiscoverRunner' class CLADiscoverRunner(get_runner(settings, base_runner)): """ Overrides the default Runner and loads the initial_groups fixture. This is because migrations are switched off during testing but we do need `initial_groups` in order for the tests to pass. """ def setup_databases(self, **kwargs): ret = super(CLADiscoverRunner, self).setup_databases(**kwargs) connection = connections[DEFAULT_DB_ALIAS] cursor = connection.cursor() cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto') call_command('loaddata', 'initial_groups') return ret
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
Python
mit
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
python
## Code Before: from django.core.management import call_command from django.test.utils import get_runner from django.conf import settings from django.db import connections, DEFAULT_DB_ALIAS # use jenkins runner if present otherwise the default django one if 'django_jenkins' in settings.INSTALLED_APPS: base_runner = 'django_jenkins.runner.CITestSuiteRunner' else: base_runner = 'django.test.runner.DiscoverRunner' class CLADiscoverRunner(get_runner(settings, base_runner)): """ Overrides the default Runner and loads the initial_groups fixture. This is because migrations are switched off during testing but we do need `initial_groups` in order for the tests to pass. """ def setup_databases(self, **kwargs): ret = super(CLADiscoverRunner, self).setup_databases(**kwargs) connection = connections[DEFAULT_DB_ALIAS] cursor = connection.cursor() cursor.execute('CREATE EXTENSION pgcrypto') call_command('loaddata', 'initial_groups') return ret ## Instruction: Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1) ## Code After: from django.core.management import call_command from django.test.utils import get_runner from django.conf import settings from django.db import connections, DEFAULT_DB_ALIAS # use jenkins runner if present otherwise the default django one if 'django_jenkins' in settings.INSTALLED_APPS: base_runner = 'django_jenkins.runner.CITestSuiteRunner' else: base_runner = 'django.test.runner.DiscoverRunner' class CLADiscoverRunner(get_runner(settings, base_runner)): """ Overrides the default Runner and loads the initial_groups fixture. This is because migrations are switched off during testing but we do need `initial_groups` in order for the tests to pass. """ def setup_databases(self, **kwargs): ret = super(CLADiscoverRunner, self).setup_databases(**kwargs) connection = connections[DEFAULT_DB_ALIAS] cursor = connection.cursor() cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto') call_command('loaddata', 'initial_groups') return ret
// ... existing code ... ret = super(CLADiscoverRunner, self).setup_databases(**kwargs) connection = connections[DEFAULT_DB_ALIAS] cursor = connection.cursor() cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto') call_command('loaddata', 'initial_groups') return ret // ... rest of the code ...
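The reason for the IF NOT EXISTS guard is idempotency: the statement is safe even when the extension already comes from template1. Since pgcrypto needs PostgreSQL, the sketch below uses sqlite3's analogous CREATE TABLE IF NOT EXISTS to show the same behaviour with only the standard library.

```python
# Idempotency sketch using the standard library's sqlite3 (pgcrypto itself is
# PostgreSQL-only, so CREATE TABLE IF NOT EXISTS stands in for the same idea):
# the guarded statement can run twice, the unguarded one raises on the repeat.
import sqlite3

conn = sqlite3.connect(':memory:')
for _ in range(2):
    conn.execute('CREATE TABLE IF NOT EXISTS audit (id INTEGER PRIMARY KEY)')
try:
    conn.execute('CREATE TABLE audit (id INTEGER PRIMARY KEY)')
except sqlite3.OperationalError as exc:
    print('unguarded second run fails:', exc)  # table audit already exists
conn.close()
```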
8836c5a5274c2a3573d2e706b67a1288de6e59bd
utils/repl.py
utils/repl.py
from nex.state import GlobalState from nex.reader import Reader, EndOfFile from nex.lexer import Lexer from nex.instructioner import Instructioner from nex.banisher import Banisher from nex.parsing.command_parser import command_parser from nex.parsing.utils import ChunkGrabber from nex.box_writer import write_to_file from nex.utils import TidyEnd reader = Reader() state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts']) font_id = state.define_new_font(file_name='cmr10', at_clause=None) state.select_font(is_global=True, font_id=font_id) lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code) instructioner = Instructioner(lexer) banisher = Banisher(instructioner, state, reader) command_grabber = ChunkGrabber(banisher, command_parser) while True: s = input('In: ') reader.insert_string(s + '\n') try: state.execute_commands(command_grabber, banisher, reader) except TidyEnd: break # out_path = sys.stdout.buffer write_to_file(state, 'done.dvi')
from nex.state import GlobalState from nex.reader import Reader from nex.lexer import Lexer from nex.instructioner import Instructioner from nex.banisher import Banisher from nex.parsing.command_parser import command_parser from nex.parsing.utils import ChunkGrabber from nex.box_writer import write_to_dvi_file from nex.utils import TidyEnd reader = Reader() state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts']) font_id = state.define_new_font(file_name='cmr10', at_clause=None) state.select_font(is_global=True, font_id=font_id) lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code) instructioner = Instructioner(lexer) banisher = Banisher(instructioner, state, reader) command_grabber = ChunkGrabber(banisher, command_parser) reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex') state.execute_commands(command_grabber, banisher, reader) while True: s = input('In: ') reader.insert_string(s + '\n') try: state.execute_commands(command_grabber, banisher, reader) except TidyEnd: break # out_path = sys.stdout.buffer write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
Insert plain.tex into REPL state
Insert plain.tex into REPL state
Python
mit
eddiejessup/nex
python
## Code Before: from nex.state import GlobalState from nex.reader import Reader, EndOfFile from nex.lexer import Lexer from nex.instructioner import Instructioner from nex.banisher import Banisher from nex.parsing.command_parser import command_parser from nex.parsing.utils import ChunkGrabber from nex.box_writer import write_to_file from nex.utils import TidyEnd reader = Reader() state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts']) font_id = state.define_new_font(file_name='cmr10', at_clause=None) state.select_font(is_global=True, font_id=font_id) lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code) instructioner = Instructioner(lexer) banisher = Banisher(instructioner, state, reader) command_grabber = ChunkGrabber(banisher, command_parser) while True: s = input('In: ') reader.insert_string(s + '\n') try: state.execute_commands(command_grabber, banisher, reader) except TidyEnd: break # out_path = sys.stdout.buffer write_to_file(state, 'done.dvi') ## Instruction: Insert plain.tex into REPL state ## Code After: from nex.state import GlobalState from nex.reader import Reader from nex.lexer import Lexer from nex.instructioner import Instructioner from nex.banisher import Banisher from nex.parsing.command_parser import command_parser from nex.parsing.utils import ChunkGrabber from nex.box_writer import write_to_dvi_file from nex.utils import TidyEnd reader = Reader() state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts']) font_id = state.define_new_font(file_name='cmr10', at_clause=None) state.select_font(is_global=True, font_id=font_id) lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code) instructioner = Instructioner(lexer) banisher = Banisher(instructioner, state, reader) command_grabber = ChunkGrabber(banisher, command_parser) reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex') state.execute_commands(command_grabber, banisher, reader) while True: s = input('In: ') reader.insert_string(s + '\n') try: state.execute_commands(command_grabber, banisher, reader) except TidyEnd: break # out_path = sys.stdout.buffer write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
... from nex.state import GlobalState from nex.reader import Reader from nex.lexer import Lexer from nex.instructioner import Instructioner from nex.banisher import Banisher from nex.parsing.command_parser import command_parser from nex.parsing.utils import ChunkGrabber from nex.box_writer import write_to_dvi_file from nex.utils import TidyEnd reader = Reader() state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts']) font_id = state.define_new_font(file_name='cmr10', at_clause=None) state.select_font(is_global=True, font_id=font_id) ... instructioner = Instructioner(lexer) banisher = Banisher(instructioner, state, reader) command_grabber = ChunkGrabber(banisher, command_parser) reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex') state.execute_commands(command_grabber, banisher, reader) while True: s = input('In: ') ... except TidyEnd: break # out_path = sys.stdout.buffer write_to_dvi_file(state, 'repl.dvi', write_pdf=True) ...
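The script above follows a common REPL shape: seed the interpreter state once, then feed one line at a time until the executor raises an end signal. A generic, stand-alone sketch of that shape, with invented stand-in names rather than the nex API, is shown below.

```python
# Generic sketch of the same REPL shape (stand-in names, not the nex API):
# seed the state once, then feed one line at a time until the executor signals
# that the document has ended.
class TidyEnd(Exception):
    """Raised by the executor when the input says it is finished."""

def execute(state, line):
    if line.strip() == r'\end':
        raise TidyEnd
    state.append(line)

state = []
execute(state, r'\input plain')           # one-off preamble, like plain.tex above
for line in [r'Hello, world.', r'\end']:  # stands in for input() in a real REPL
    try:
        execute(state, line)
    except TidyEnd:
        print('finished with', len(state), 'lines of input')
        break
```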
d028f66964249bab928a29d92ab4cff075352546
integration/main.py
integration/main.py
from spec import Spec, skip


class Tessera(Spec):
    def is_importable(self):
        import tessera
        assert tessera.app
        assert tessera.db
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp

from spec import Spec, skip


@contextmanager
def _tmp():
    try:
        tempdir = mkdtemp()
        yield tempdir
    finally:
        rmtree(tempdir)

@contextmanager
def _db():
    with _tmp() as tempdir:
        from tessera import app, db
        # Temp db location
        path = os.path.join(tempdir, 'tessera.db')
        dbfile = 'sqlite:///{0}'.format(path)
        # Inform app of that location & setup
        app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
        db.create_all()
        # Let test have its way with that temp db
        yield db

class Config(object):
    pass

def _config(**options):
    config = Config()
    for key, value in options.iteritems():
        setattr(config, key, value)


class Tessera(Spec):
    def is_importable(self):
        import tessera
        assert tessera.app
        assert tessera.db

    def creates_a_nonempty_database_schema(self):
        with _db() as db:
            meta = db.MetaData()
            meta.reflect(db.engine)
            assert len(meta.tables) > 0
Add temp DB test harness + basic test
Add temp DB test harness + basic test
Python
apache-2.0
tessera-metrics/tessera,jmptrader/tessera,aalpern/tessera,Slach/tessera,filippog/tessera,aalpern/tessera,aalpern/tessera,section-io/tessera,urbanairship/tessera,aalpern/tessera,urbanairship/tessera,Slach/tessera,jmptrader/tessera,urbanairship/tessera,Slach/tessera,urbanairship/tessera,urbanairship/tessera,tessera-metrics/tessera,section-io/tessera,aalpern/tessera,tessera-metrics/tessera,filippog/tessera,tessera-metrics/tessera,section-io/tessera,jmptrader/tessera,jmptrader/tessera,filippog/tessera,section-io/tessera,Slach/tessera,jmptrader/tessera,tessera-metrics/tessera
python
## Code Before:
from spec import Spec, skip


class Tessera(Spec):
    def is_importable(self):
        import tessera
        assert tessera.app
        assert tessera.db

## Instruction:
Add temp DB test harness + basic test

## Code After:
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp

from spec import Spec, skip


@contextmanager
def _tmp():
    try:
        tempdir = mkdtemp()
        yield tempdir
    finally:
        rmtree(tempdir)

@contextmanager
def _db():
    with _tmp() as tempdir:
        from tessera import app, db
        # Temp db location
        path = os.path.join(tempdir, 'tessera.db')
        dbfile = 'sqlite:///{0}'.format(path)
        # Inform app of that location & setup
        app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
        db.create_all()
        # Let test have its way with that temp db
        yield db

class Config(object):
    pass

def _config(**options):
    config = Config()
    for key, value in options.iteritems():
        setattr(config, key, value)


class Tessera(Spec):
    def is_importable(self):
        import tessera
        assert tessera.app
        assert tessera.db

    def creates_a_nonempty_database_schema(self):
        with _db() as db:
            meta = db.MetaData()
            meta.reflect(db.engine)
            assert len(meta.tables) > 0
...
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp

from spec import Spec, skip


@contextmanager
def _tmp():
    try:
        tempdir = mkdtemp()
        yield tempdir
    finally:
        rmtree(tempdir)

@contextmanager
def _db():
    with _tmp() as tempdir:
        from tessera import app, db
        # Temp db location
        path = os.path.join(tempdir, 'tessera.db')
        dbfile = 'sqlite:///{0}'.format(path)
        # Inform app of that location & setup
        app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
        db.create_all()
        # Let test have its way with that temp db
        yield db

class Config(object):
    pass

def _config(**options):
    config = Config()
    for key, value in options.iteritems():
        setattr(config, key, value)


class Tessera(Spec):
...
        import tessera
        assert tessera.app
        assert tessera.db

    def creates_a_nonempty_database_schema(self):
        with _db() as db:
            meta = db.MetaData()
            meta.reflect(db.engine)
            assert len(meta.tables) > 0
...
a04f3d167011e6d0e50d6a088f5877769fbedaa2
testfixtures/shop_order.py
testfixtures/shop_order.py
from byceps.services.shop.order.models.order import Order
from byceps.services.shop.order.models.orderer import Orderer
from byceps.services.shop.order import service
from byceps.services.shop.order.transfer.models import PaymentMethod


ANY_ORDER_NUMBER = 'AEC-03-B00074'


def create_orderer(user):
    return Orderer(
        user.id,
        user.detail.first_names,
        user.detail.last_name,
        user.detail.country,
        user.detail.zip_code,
        user.detail.city,
        user.detail.street)


def create_order(shop_id, placed_by, *, order_number=ANY_ORDER_NUMBER,
                 payment_method=PaymentMethod.bank_transfer,
                 shipping_required=False):
    order = Order(
        shop_id,
        order_number,
        placed_by.id,
        placed_by.detail.first_names,
        placed_by.detail.last_name,
        placed_by.detail.country,
        placed_by.detail.zip_code,
        placed_by.detail.city,
        placed_by.detail.street,
        payment_method,
    )

    order.shipping_required = shipping_required

    return order


def create_order_item(order, article, quantity):
    return service._add_article_to_order(order, article, quantity)
from byceps.services.shop.order.models.order import Order
from byceps.services.shop.order.models.orderer import Orderer
from byceps.services.shop.order.transfer.models import PaymentMethod


ANY_ORDER_NUMBER = 'AEC-03-B00074'


def create_orderer(user):
    return Orderer(
        user.id,
        user.detail.first_names,
        user.detail.last_name,
        user.detail.country,
        user.detail.zip_code,
        user.detail.city,
        user.detail.street)


def create_order(shop_id, placed_by, *, order_number=ANY_ORDER_NUMBER,
                 payment_method=PaymentMethod.bank_transfer,
                 shipping_required=False):
    order = Order(
        shop_id,
        order_number,
        placed_by.id,
        placed_by.detail.first_names,
        placed_by.detail.last_name,
        placed_by.detail.country,
        placed_by.detail.zip_code,
        placed_by.detail.city,
        placed_by.detail.street,
        payment_method,
    )

    order.shipping_required = shipping_required

    return order
Remove unused test fixture `create_order_item`
Remove unused test fixture `create_order_item`
Python
bsd-3-clause
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
python
## Code Before:
from byceps.services.shop.order.models.order import Order
from byceps.services.shop.order.models.orderer import Orderer
from byceps.services.shop.order import service
from byceps.services.shop.order.transfer.models import PaymentMethod


ANY_ORDER_NUMBER = 'AEC-03-B00074'


def create_orderer(user):
    return Orderer(
        user.id,
        user.detail.first_names,
        user.detail.last_name,
        user.detail.country,
        user.detail.zip_code,
        user.detail.city,
        user.detail.street)


def create_order(shop_id, placed_by, *, order_number=ANY_ORDER_NUMBER,
                 payment_method=PaymentMethod.bank_transfer,
                 shipping_required=False):
    order = Order(
        shop_id,
        order_number,
        placed_by.id,
        placed_by.detail.first_names,
        placed_by.detail.last_name,
        placed_by.detail.country,
        placed_by.detail.zip_code,
        placed_by.detail.city,
        placed_by.detail.street,
        payment_method,
    )

    order.shipping_required = shipping_required

    return order


def create_order_item(order, article, quantity):
    return service._add_article_to_order(order, article, quantity)

## Instruction:
Remove unused test fixture `create_order_item`

## Code After:
from byceps.services.shop.order.models.order import Order
from byceps.services.shop.order.models.orderer import Orderer
from byceps.services.shop.order.transfer.models import PaymentMethod


ANY_ORDER_NUMBER = 'AEC-03-B00074'


def create_orderer(user):
    return Orderer(
        user.id,
        user.detail.first_names,
        user.detail.last_name,
        user.detail.country,
        user.detail.zip_code,
        user.detail.city,
        user.detail.street)


def create_order(shop_id, placed_by, *, order_number=ANY_ORDER_NUMBER,
                 payment_method=PaymentMethod.bank_transfer,
                 shipping_required=False):
    order = Order(
        shop_id,
        order_number,
        placed_by.id,
        placed_by.detail.first_names,
        placed_by.detail.last_name,
        placed_by.detail.country,
        placed_by.detail.zip_code,
        placed_by.detail.city,
        placed_by.detail.street,
        payment_method,
    )

    order.shipping_required = shipping_required

    return order
// ... existing code ...
from byceps.services.shop.order.models.order import Order
from byceps.services.shop.order.models.orderer import Orderer
from byceps.services.shop.order.transfer.models import PaymentMethod


// ... modified code ...
    order.shipping_required = shipping_required

    return order
// ... rest of the code ...
d9c635f3ee15a921b4ef197ee74a4af4904849a7
src/main/java/org/realityforge/tarrabah/SyslogUDPServer.java
src/main/java/org/realityforge/tarrabah/SyslogUDPServer.java
package org.realityforge.tarrabah;

import javax.inject.Inject;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;

public class SyslogUDPServer extends AbstractUDPServer {
  @Inject
  private SyslogHandler _syslogHandler;

  protected ChannelPipelineFactory newPipelineFactory() {
    return new ChannelPipelineFactory() {
      public ChannelPipeline getPipeline() throws Exception {
        _syslogHandler = new SyslogHandler();
        return Channels.pipeline( _syslogHandler );
      }
    };
  }
}
package org.realityforge.tarrabah;

import javax.inject.Inject;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;

public class SyslogUDPServer extends AbstractUDPServer {
  @Inject
  private SyslogHandler _syslogHandler;

  protected ChannelPipelineFactory newPipelineFactory() {
    return new ChannelPipelineFactory() {
      public ChannelPipeline getPipeline() throws Exception {
        return Channels.pipeline( _syslogHandler );
      }
    };
  }
}
Remove explicit instantiation as bean is CDI managed
Remove explicit instantiation as bean is CDI managed
Java
apache-2.0
realityforge-experiments/tarrabah,realityforge-experiments/tarrabah
java
## Code Before:
package org.realityforge.tarrabah;

import javax.inject.Inject;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;

public class SyslogUDPServer extends AbstractUDPServer {
  @Inject
  private SyslogHandler _syslogHandler;

  protected ChannelPipelineFactory newPipelineFactory() {
    return new ChannelPipelineFactory() {
      public ChannelPipeline getPipeline() throws Exception {
        _syslogHandler = new SyslogHandler();
        return Channels.pipeline( _syslogHandler );
      }
    };
  }
}

## Instruction:
Remove explicit instantiation as bean is CDI managed

## Code After:
package org.realityforge.tarrabah;

import javax.inject.Inject;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;

public class SyslogUDPServer extends AbstractUDPServer {
  @Inject
  private SyslogHandler _syslogHandler;

  protected ChannelPipelineFactory newPipelineFactory() {
    return new ChannelPipelineFactory() {
      public ChannelPipeline getPipeline() throws Exception {
        return Channels.pipeline( _syslogHandler );
      }
    };
  }
}
...
      public ChannelPipeline getPipeline() throws Exception {
        return Channels.pipeline( _syslogHandler );
      }
    };
...
881222a49c6b3e8792adf5754c61992bd12c7b28
tests/test_conduction.py
tests/test_conduction.py
"""Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) self.conduction = pymongo.MongoClient(self.mockup.uri).test def test_bad_command_name(self): with self.assertRaises(OperationFailure): self.conduction.command('foo') if __name__ == '__main__': unittest.main()
"""Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) # Any database name will do. self.conduction = pymongo.MongoClient(self.mockup.uri).conduction def test_root_uri(self): reply = self.conduction.command('get', '/') self.assertIn('links', reply) self.assertIn('service', reply) def test_bad_command_name(self): with self.assertRaises(OperationFailure) as context: self.conduction.command('foo') self.assertIn('unrecognized: {"foo": 1}', str(context.exception)) def test_server_id_404(self): with self.assertRaises(OperationFailure) as context: self.conduction.command({'post': '/v1/servers/'}) self.assertIn('404 Not Found', str(context.exception)) if __name__ == '__main__': unittest.main()
Test root URI and 404s.
Test root URI and 404s.
Python
apache-2.0
ajdavis/mongo-conduction
python
## Code Before:
"""Test Mongo Conduction."""

import logging

import pymongo

from mockupdb import go
from pymongo.errors import OperationFailure

from conduction.server import get_mockup, main_loop
from tests import unittest  # unittest2 on Python 2.6.


class ConductionTest(unittest.TestCase):
    def setUp(self):
        self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False)

        # Quiet.
        logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)

        self.mockup.run()
        self.loop_future = go(main_loop, self.mockup)

        # Cleanups are LIFO: Stop the server, wait for the loop to exit.
        self.addCleanup(self.loop_future)
        self.addCleanup(self.mockup.stop)

        self.conduction = pymongo.MongoClient(self.mockup.uri).test

    def test_bad_command_name(self):
        with self.assertRaises(OperationFailure):
            self.conduction.command('foo')


if __name__ == '__main__':
    unittest.main()

## Instruction:
Test root URI and 404s.

## Code After:
"""Test Mongo Conduction."""

import logging

import pymongo

from mockupdb import go
from pymongo.errors import OperationFailure

from conduction.server import get_mockup, main_loop
from tests import unittest  # unittest2 on Python 2.6.


class ConductionTest(unittest.TestCase):
    def setUp(self):
        self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False)

        # Quiet.
        logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL)

        self.mockup.run()
        self.loop_future = go(main_loop, self.mockup)

        # Cleanups are LIFO: Stop the server, wait for the loop to exit.
        self.addCleanup(self.loop_future)
        self.addCleanup(self.mockup.stop)

        # Any database name will do.
        self.conduction = pymongo.MongoClient(self.mockup.uri).conduction

    def test_root_uri(self):
        reply = self.conduction.command('get', '/')
        self.assertIn('links', reply)
        self.assertIn('service', reply)

    def test_bad_command_name(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command('foo')

        self.assertIn('unrecognized: {"foo": 1}', str(context.exception))

    def test_server_id_404(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command({'post': '/v1/servers/'})

        self.assertIn('404 Not Found', str(context.exception))


if __name__ == '__main__':
    unittest.main()
// ... existing code ...
        self.addCleanup(self.loop_future)
        self.addCleanup(self.mockup.stop)

        # Any database name will do.
        self.conduction = pymongo.MongoClient(self.mockup.uri).conduction

    def test_root_uri(self):
        reply = self.conduction.command('get', '/')
        self.assertIn('links', reply)
        self.assertIn('service', reply)

    def test_bad_command_name(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command('foo')

        self.assertIn('unrecognized: {"foo": 1}', str(context.exception))

    def test_server_id_404(self):
        with self.assertRaises(OperationFailure) as context:
            self.conduction.command({'post': '/v1/servers/'})

        self.assertIn('404 Not Found', str(context.exception))


if __name__ == '__main__':
// ... rest of the code ...
2a95f05a2670c47a789adad303498b6ba18dd64a
multiplayer-server/src/main/java/br/odb/multiplayer/model/ServerContext.java
multiplayer-server/src/main/java/br/odb/multiplayer/model/ServerContext.java
package br.odb.multiplayer.model;

import java.util.HashMap;

import javax.servlet.ServletContext;

public class ServerContext {

	public final HashMap<Integer, Game> games = new HashMap<Integer, Game>();
	public final HashMap< String, Game > gameBuilders = new HashMap< String, Game >();

	public ServerContext() {
	}

	public static ServerContext createOrRetrieve( ServletContext servletContext) {

		ServerContext context = (ServerContext) servletContext
				.getAttribute("games-context");

		if (context == null) {
			reset( servletContext );
		}

		return context;
	}

	public static void reset(ServletContext servletContext) {
		servletContext.setAttribute("games-context", new ServerContext() );
	}
}
package br.odb.multiplayer.model;

import java.util.HashMap;

import javax.servlet.ServletContext;

public class ServerContext {

	public final HashMap<Integer, Game> games = new HashMap<Integer, Game>();
	public final HashMap< String, Game > gameBuilders = new HashMap< String, Game >();

	public ServerContext() {
	}

	public static ServerContext createOrRetrieve( ServletContext servletContext) {

		ServerContext context = (ServerContext) servletContext
				.getAttribute("games-context");

		if (context == null) {
			reset( servletContext );
			context = (ServerContext) servletContext.getAttribute("games-context");
		}

		return context;
	}

	public static void reset(ServletContext servletContext) {
		servletContext.setAttribute("games-context", new ServerContext() );
	}
}
Fix idiotic bug in GetGameId
Fix idiotic bug in GetGameId

The first try to play would always fail.
Java
bsd-2-clause
TheFakeMontyOnTheRun/tic-tac-toe-multiplayer,TheFakeMontyOnTheRun/tic-tac-toe-multiplayer
java
## Code Before:
package br.odb.multiplayer.model;

import java.util.HashMap;

import javax.servlet.ServletContext;

public class ServerContext {

	public final HashMap<Integer, Game> games = new HashMap<Integer, Game>();
	public final HashMap< String, Game > gameBuilders = new HashMap< String, Game >();

	public ServerContext() {
	}

	public static ServerContext createOrRetrieve( ServletContext servletContext) {

		ServerContext context = (ServerContext) servletContext
				.getAttribute("games-context");

		if (context == null) {
			reset( servletContext );
		}

		return context;
	}

	public static void reset(ServletContext servletContext) {
		servletContext.setAttribute("games-context", new ServerContext() );
	}
}

## Instruction:
Fix idiotic bug in GetGameId

The first try to play would always fail.

## Code After:
package br.odb.multiplayer.model;

import java.util.HashMap;

import javax.servlet.ServletContext;

public class ServerContext {

	public final HashMap<Integer, Game> games = new HashMap<Integer, Game>();
	public final HashMap< String, Game > gameBuilders = new HashMap< String, Game >();

	public ServerContext() {
	}

	public static ServerContext createOrRetrieve( ServletContext servletContext) {

		ServerContext context = (ServerContext) servletContext
				.getAttribute("games-context");

		if (context == null) {
			reset( servletContext );
			context = (ServerContext) servletContext.getAttribute("games-context");
		}

		return context;
	}

	public static void reset(ServletContext servletContext) {
		servletContext.setAttribute("games-context", new ServerContext() );
	}
}
# ... existing code ...
		if (context == null) {
			reset( servletContext );
			context = (ServerContext) servletContext.getAttribute("games-context");
		}

		return context;
# ... rest of the code ...
e2fa4b150546be4b4f0ae59f18ef6ba2b6180d1a
accounts/serializers.py
accounts/serializers.py
"""Serializers for account models""" # pylint: disable=too-few-public-methods from rest_framework import serializers from accounts.models import User class UserSerializer(serializers.ModelSerializer): """Serializer for Users""" class Meta: """Model and field definitions""" model = User fields = ( 'id', 'username', 'email', 'website', 'avatar', 'steamid', 'is_staff', )
"""Serializers for account models""" # pylint: disable=too-few-public-methods from rest_framework import serializers from accounts.models import User class UserSerializer(serializers.ModelSerializer): """Serializer for Users""" class Meta: """Model and field definitions""" model = User fields = ( "id", "username", "email", "website", "avatar_url", "steamid", "is_staff", )
Change avatar to avatar_url in the user API
Change avatar to avatar_url in the user API
Python
agpl-3.0
lutris/website,lutris/website,lutris/website,lutris/website
python
## Code Before:
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers

from accounts.models import User


class UserSerializer(serializers.ModelSerializer):
    """Serializer for Users"""
    class Meta:
        """Model and field definitions"""
        model = User
        fields = (
            'id',
            'username',
            'email',
            'website',
            'avatar',
            'steamid',
            'is_staff',
        )

## Instruction:
Change avatar to avatar_url in the user API

## Code After:
"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers

from accounts.models import User


class UserSerializer(serializers.ModelSerializer):
    """Serializer for Users"""
    class Meta:
        """Model and field definitions"""
        model = User
        fields = (
            "id",
            "username",
            "email",
            "website",
            "avatar_url",
            "steamid",
            "is_staff",
        )
// ... existing code ...
    class Meta:
        """Model and field definitions"""
        model = User
        fields = (
            "id",
            "username",
            "email",
            "website",
            "avatar_url",
            "steamid",
            "is_staff",
        )
// ... rest of the code ...
61c4c634807b4adfe9e08152543eba396e256ab9
conllu/tree_helpers.py
conllu/tree_helpers.py
from __future__ import print_function, unicode_literals
from collections import namedtuple

TreeNode = namedtuple('TreeNode', ['data', 'children'])

def create_tree(node_children_mapping, start=0):
    subtree = [
        TreeNode(child, create_tree(node_children_mapping, child["id"]))
        for child in node_children_mapping[start]
    ]
    return subtree

def print_tree(node, depth=0):
    assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)

    print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
        deprel=node.data["deprel"],
        form=node.data["form"],
        tag=node.data["upostag"],
        idx=node.data["id"],
    ))
    for child in node.children:
        print_tree(child, depth + 1)
from __future__ import print_function, unicode_literals
from collections import namedtuple

TreeNode = namedtuple('TreeNode', ['data', 'children'])

def create_tree(node_children_mapping, start=0):
    subtree = [
        TreeNode(child, create_tree(node_children_mapping, child["id"]))
        for child in node_children_mapping[start]
    ]
    return subtree

def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
    assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)

    relevant_data = node.data.copy()
    map(lambda x: relevant_data.pop(x, None), exclude_fields)

    node_repr = " ".join([
        "{key}:{value}".format(key=key, value=value)
        for key, value in relevant_data.items()
    ])

    print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
        deprel=node.data["deprel"],
        node_repr=node_repr,
        idx=node.data["id"],
    ))
    for child in node.children:
        print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
Generalize print_tree to work with different number of columns.
Generalize print_tree to work with different number of columns.
Python
mit
EmilStenstrom/conllu
python
## Code Before:
from __future__ import print_function, unicode_literals
from collections import namedtuple

TreeNode = namedtuple('TreeNode', ['data', 'children'])

def create_tree(node_children_mapping, start=0):
    subtree = [
        TreeNode(child, create_tree(node_children_mapping, child["id"]))
        for child in node_children_mapping[start]
    ]
    return subtree

def print_tree(node, depth=0):
    assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)

    print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
        deprel=node.data["deprel"],
        form=node.data["form"],
        tag=node.data["upostag"],
        idx=node.data["id"],
    ))
    for child in node.children:
        print_tree(child, depth + 1)

## Instruction:
Generalize print_tree to work with different number of columns.

## Code After:
from __future__ import print_function, unicode_literals
from collections import namedtuple

TreeNode = namedtuple('TreeNode', ['data', 'children'])

def create_tree(node_children_mapping, start=0):
    subtree = [
        TreeNode(child, create_tree(node_children_mapping, child["id"]))
        for child in node_children_mapping[start]
    ]
    return subtree

def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
    assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)

    relevant_data = node.data.copy()
    map(lambda x: relevant_data.pop(x, None), exclude_fields)

    node_repr = " ".join([
        "{key}:{value}".format(key=key, value=value)
        for key, value in relevant_data.items()
    ])

    print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
        deprel=node.data["deprel"],
        node_repr=node_repr,
        idx=node.data["id"],
    ))
    for child in node.children:
        print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
...
    ]
    return subtree

def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
    assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)

    relevant_data = node.data.copy()
    map(lambda x: relevant_data.pop(x, None), exclude_fields)

    node_repr = " ".join([
        "{key}:{value}".format(key=key, value=value)
        for key, value in relevant_data.items()
    ])

    print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
        deprel=node.data["deprel"],
        node_repr=node_repr,
        idx=node.data["id"],
    ))
    for child in node.children:
        print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
...
27a944d5fc74972a90e8dd69879ebc27c4412b99
test/python_api/default-constructor/sb_frame.py
test/python_api/default-constructor/sb_frame.py
import sys
import lldb

def fuzz_obj(obj):
    obj.GetFrameID()
    obj.GetPC()
    obj.SetPC(0xffffffff)
    obj.GetSP()
    obj.GetFP()
    obj.GetPCAddress()
    obj.GetSymbolContext(0)
    obj.GetModule()
    obj.GetCompileUnit()
    obj.GetFunction()
    obj.GetSymbol()
    obj.GetBlock()
    obj.GetFunctionName()
    obj.IsInlined()
    obj.EvaluateExpression("x + y")
    obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
    obj.GetFrameBlock()
    obj.GetLineEntry()
    obj.GetThread()
    obj.Disassemble()
    obj.GetVariables(True, True, True, True)
    obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
    obj.GetRegisters()
    obj.FindVariable("my_var")
    obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
    obj.GetDescription(lldb.SBStream())
    obj.Clear()
import sys
import lldb

def fuzz_obj(obj):
    obj.GetFrameID()
    obj.GetPC()
    obj.SetPC(0xffffffff)
    obj.GetSP()
    obj.GetFP()
    obj.GetPCAddress()
    obj.GetSymbolContext(0)
    obj.GetModule()
    obj.GetCompileUnit()
    obj.GetFunction()
    obj.GetSymbol()
    obj.GetBlock()
    obj.GetFunctionName()
    obj.IsInlined()
    obj.EvaluateExpression("x + y")
    obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
    obj.GetFrameBlock()
    obj.GetLineEntry()
    obj.GetThread()
    obj.Disassemble()
    obj.GetVariables(True, True, True, True)
    obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
    obj.GetRegisters()
    obj.FindVariable("my_var")
    obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
    obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
    obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
    obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
    obj.GetDescription(lldb.SBStream())
    obj.Clear()
Add FindValue() and WatchValue() fuzz calls to the mix.
Add FindValue() and WatchValue() fuzz calls to the mix.

git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb
python
## Code Before:
import sys
import lldb

def fuzz_obj(obj):
    obj.GetFrameID()
    obj.GetPC()
    obj.SetPC(0xffffffff)
    obj.GetSP()
    obj.GetFP()
    obj.GetPCAddress()
    obj.GetSymbolContext(0)
    obj.GetModule()
    obj.GetCompileUnit()
    obj.GetFunction()
    obj.GetSymbol()
    obj.GetBlock()
    obj.GetFunctionName()
    obj.IsInlined()
    obj.EvaluateExpression("x + y")
    obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
    obj.GetFrameBlock()
    obj.GetLineEntry()
    obj.GetThread()
    obj.Disassemble()
    obj.GetVariables(True, True, True, True)
    obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
    obj.GetRegisters()
    obj.FindVariable("my_var")
    obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
    obj.GetDescription(lldb.SBStream())
    obj.Clear()

## Instruction:
Add FindValue() and WatchValue() fuzz calls to the mix.

git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8

## Code After:
import sys
import lldb

def fuzz_obj(obj):
    obj.GetFrameID()
    obj.GetPC()
    obj.SetPC(0xffffffff)
    obj.GetSP()
    obj.GetFP()
    obj.GetPCAddress()
    obj.GetSymbolContext(0)
    obj.GetModule()
    obj.GetCompileUnit()
    obj.GetFunction()
    obj.GetSymbol()
    obj.GetBlock()
    obj.GetFunctionName()
    obj.IsInlined()
    obj.EvaluateExpression("x + y")
    obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
    obj.GetFrameBlock()
    obj.GetLineEntry()
    obj.GetThread()
    obj.Disassemble()
    obj.GetVariables(True, True, True, True)
    obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
    obj.GetRegisters()
    obj.FindVariable("my_var")
    obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
    obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
    obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
    obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
    obj.GetDescription(lldb.SBStream())
    obj.Clear()
...
    obj.GetRegisters()
    obj.FindVariable("my_var")
    obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
    obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
    obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
    obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
    obj.GetDescription(lldb.SBStream())
    obj.Clear()
...
6b762607914e1c79bc05f7e8d5cdbe6c6d7a49e4
hiro/patches.py
hiro/patches.py
import abc
from datetime import date as realdate
from datetime import datetime as realdatetime
import time

import six


class DatetimeMeta(abc.ABCMeta):
    """
    meta class to allow interaction between :class:`datetime.datetime`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdatetime)


class DateMeta(type):
    """
    meta class to allow interaction between :class:`datetime.date`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdate)


@six.add_metaclass(DatetimeMeta)
class Datetime(realdatetime):
    """
    used to patch :class:`datetime.datetime` to follow the rules of the
    parent :class:`hiro.Timeline`
    """

    @classmethod
    def now(cls, tz=None):
        return cls.fromtimestamp(time.time(), tz)

    @classmethod
    def utcnow(cls):
        return cls.fromtimestamp(time.mktime(time.gmtime()))


@six.add_metaclass(DateMeta)
class Date(realdate):
    """
    used to patch :class:`datetime.date` to follow the rules of the
    parent :class:`hiro.Timeline`
    """
    __metaclass__ = DateMeta

    @classmethod
    def today(cls):
        return cls.fromtimestamp(time.time())
import abc
from datetime import date as realdate
from datetime import datetime as realdatetime
import time

import six


class DatetimeMeta(abc.ABCMeta):
    """
    meta class to allow interaction between :class:`datetime.datetime`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdatetime)


class DateMeta(type):
    """
    meta class to allow interaction between :class:`datetime.date`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdate)


@six.add_metaclass(DatetimeMeta)
class Datetime(realdatetime):
    """
    used to patch :class:`datetime.datetime` to follow the rules of the
    parent :class:`hiro.Timeline`
    """

    @classmethod
    def now(cls, tz=None):
        return cls.fromtimestamp(time.time(), tz)

    @classmethod
    def utcnow(cls):
        return cls.utcfromtimestamp(time.time())


@six.add_metaclass(DateMeta)
class Date(realdate):
    """
    used to patch :class:`datetime.date` to follow the rules of the
    parent :class:`hiro.Timeline`
    """
    __metaclass__ = DateMeta

    @classmethod
    def today(cls):
        return cls.fromtimestamp(time.time())
Fix issue with daylight saving time + utcnow
Fix issue with daylight saving time + utcnow

Fixes issue #2
Python
mit
alisaifee/hiro,alisaifee/hiro
python
## Code Before:
import abc
from datetime import date as realdate
from datetime import datetime as realdatetime
import time

import six


class DatetimeMeta(abc.ABCMeta):
    """
    meta class to allow interaction between :class:`datetime.datetime`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdatetime)


class DateMeta(type):
    """
    meta class to allow interaction between :class:`datetime.date`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdate)


@six.add_metaclass(DatetimeMeta)
class Datetime(realdatetime):
    """
    used to patch :class:`datetime.datetime` to follow the rules of the
    parent :class:`hiro.Timeline`
    """

    @classmethod
    def now(cls, tz=None):
        return cls.fromtimestamp(time.time(), tz)

    @classmethod
    def utcnow(cls):
        return cls.fromtimestamp(time.mktime(time.gmtime()))


@six.add_metaclass(DateMeta)
class Date(realdate):
    """
    used to patch :class:`datetime.date` to follow the rules of the
    parent :class:`hiro.Timeline`
    """
    __metaclass__ = DateMeta

    @classmethod
    def today(cls):
        return cls.fromtimestamp(time.time())

## Instruction:
Fix issue with daylight saving time + utcnow

Fixes issue #2

## Code After:
import abc
from datetime import date as realdate
from datetime import datetime as realdatetime
import time

import six


class DatetimeMeta(abc.ABCMeta):
    """
    meta class to allow interaction between :class:`datetime.datetime`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdatetime)


class DateMeta(type):
    """
    meta class to allow interaction between :class:`datetime.date`
    objects create inside the :class:`hiro.Timeline` with those created
    outside it.
    """

    def __instancecheck__(cls, instance):
        return isinstance(instance, realdate)


@six.add_metaclass(DatetimeMeta)
class Datetime(realdatetime):
    """
    used to patch :class:`datetime.datetime` to follow the rules of the
    parent :class:`hiro.Timeline`
    """

    @classmethod
    def now(cls, tz=None):
        return cls.fromtimestamp(time.time(), tz)

    @classmethod
    def utcnow(cls):
        return cls.utcfromtimestamp(time.time())


@six.add_metaclass(DateMeta)
class Date(realdate):
    """
    used to patch :class:`datetime.date` to follow the rules of the
    parent :class:`hiro.Timeline`
    """
    __metaclass__ = DateMeta

    @classmethod
    def today(cls):
        return cls.fromtimestamp(time.time())
# ... existing code ...
    @classmethod
    def utcnow(cls):
        return cls.utcfromtimestamp(time.time())


@six.add_metaclass(DateMeta)
class Date(realdate):
# ... rest of the code ...
99bdc393d44a8fe9f73efcae141f1a0627a24f17
setup.py
setup.py
import os

from setuptools import setup, find_packages

packages = find_packages()
packages.remove('sample_project')

classifiers = """
Topic :: Internet :: WWW/HTTP :: Dynamic Content
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Development Status :: 4 - Beta
"""

setup(
    name='django-pagelets',
    version='0.5',
    author='Caktus Consulting Group',
    author_email='[email protected]',
    packages=packages,
    install_requires = [],
    include_package_data = True,
    exclude_package_data={
        '': ['*.sql', '*.pyc'],
        'pagelets': ['media/*'],
    },
    url='http://http://github.com/caktus/django-pagelets',
    license='LICENSE.txt',
    description='Simple, flexible app for integrating static, unstructured '
                'content in a Django site',
    classifiers = filter(None, classifiers.split("\n")),
    long_description=open('README.rst').read(),
)
import os

from setuptools import setup, find_packages

packages = find_packages(exclude=['sample_project'])

classifiers = """
Topic :: Internet :: WWW/HTTP :: Dynamic Content
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Development Status :: 4 - Beta
Operating System :: OS Independent
"""

setup(
    name='django-pagelets',
    version='0.5',
    author='Caktus Consulting Group',
    author_email='[email protected]',
    packages=packages,
    install_requires = [],
    include_package_data = True,
    exclude_package_data={
        '': ['*.sql', '*.pyc'],
        'pagelets': ['media/*'],
    },
    url='http://http://github.com/caktus/django-pagelets',
    license='LICENSE.txt',
    description='Simple, flexible app for integrating static, unstructured '
                'content in a Django site',
    classifiers = filter(None, classifiers.split("\n")),
    long_description=open('README.rst').read(),
)
Fix exclude of sample_project for installation.
Fix exclude of sample_project for installation.
Python
bsd-3-clause
caktus/django-pagelets,caktus/django-pagelets,caktus/django-pagelets,caktus/django-pagelets
python
## Code Before:
import os

from setuptools import setup, find_packages

packages = find_packages()
packages.remove('sample_project')

classifiers = """
Topic :: Internet :: WWW/HTTP :: Dynamic Content
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Development Status :: 4 - Beta
"""

setup(
    name='django-pagelets',
    version='0.5',
    author='Caktus Consulting Group',
    author_email='[email protected]',
    packages=packages,
    install_requires = [],
    include_package_data = True,
    exclude_package_data={
        '': ['*.sql', '*.pyc'],
        'pagelets': ['media/*'],
    },
    url='http://http://github.com/caktus/django-pagelets',
    license='LICENSE.txt',
    description='Simple, flexible app for integrating static, unstructured '
                'content in a Django site',
    classifiers = filter(None, classifiers.split("\n")),
    long_description=open('README.rst').read(),
)

## Instruction:
Fix exclude of sample_project for installation.

## Code After:
import os

from setuptools import setup, find_packages

packages = find_packages(exclude=['sample_project'])

classifiers = """
Topic :: Internet :: WWW/HTTP :: Dynamic Content
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Development Status :: 4 - Beta
Operating System :: OS Independent
"""

setup(
    name='django-pagelets',
    version='0.5',
    author='Caktus Consulting Group',
    author_email='[email protected]',
    packages=packages,
    install_requires = [],
    include_package_data = True,
    exclude_package_data={
        '': ['*.sql', '*.pyc'],
        'pagelets': ['media/*'],
    },
    url='http://http://github.com/caktus/django-pagelets',
    license='LICENSE.txt',
    description='Simple, flexible app for integrating static, unstructured '
                'content in a Django site',
    classifiers = filter(None, classifiers.split("\n")),
    long_description=open('README.rst').read(),
)
// ... existing code ...
import os

from setuptools import setup, find_packages

packages = find_packages(exclude=['sample_project'])

classifiers = """
Topic :: Internet :: WWW/HTTP :: Dynamic Content
// ... modified code ...
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Development Status :: 4 - Beta
Operating System :: OS Independent
"""

setup(
// ... rest of the code ...
326e7ba1378b691ad6323c2559686f0c4d97b45f
flowgen/core.py
flowgen/core.py
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser

from pypeg2 import parse
from pypeg2.xmlast import thing2xml


class FlowGen(object):

    def __init__(self, args):
        self.args = parser.parse_args(args)

    def any_output(self):
        return any([self.args.dump_source, self.args.dump_xml])

    def safe_print(self, *args, **kwargs):
        if not self.any_output():
            print(*args, **kwargs)

    def run(self):
        data_input = self.args.infile.read()
        tree = parse(data_input, Code)

        if self.args.dump_xml:
            print(thing2xml(tree, pretty=True).decode())

        graph = Graph(tree)

        if self.args.dump_source:
            print(graph.get_source())

        if self.args.preview:
            graph.dot.view()

        if self.args.outfile:
            graph.save(self.args.outfile.name)
            self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
from __future__ import print_function

from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser

from pypeg2 import parse
from pypeg2.xmlast import thing2xml


class FlowGen(object):

    def __init__(self, args):
        self.args = parser.parse_args(args)

    def any_output(self):
        return any([self.args.dump_source, self.args.dump_xml])

    def safe_print(self, *args, **kwargs):
        if not self.any_output():
            print(*args, **kwargs)

    def run(self):
        data_input = self.args.infile.read()
        tree = parse(data_input, Code)

        if self.args.dump_xml:
            print(thing2xml(tree, pretty=True).decode())

        graph = Graph(tree)

        if self.args.dump_source:
            print(graph.get_source())

        if self.args.preview:
            graph.dot.view()

        if self.args.outfile:
            graph.save(self.args.outfile.name)
            self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
Update py27 compatibility in print function
Update py27 compatibility in print function
Python
mit
ad-m/flowgen
python
## Code Before:
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser

from pypeg2 import parse
from pypeg2.xmlast import thing2xml


class FlowGen(object):

    def __init__(self, args):
        self.args = parser.parse_args(args)

    def any_output(self):
        return any([self.args.dump_source, self.args.dump_xml])

    def safe_print(self, *args, **kwargs):
        if not self.any_output():
            print(*args, **kwargs)

    def run(self):
        data_input = self.args.infile.read()
        tree = parse(data_input, Code)

        if self.args.dump_xml:
            print(thing2xml(tree, pretty=True).decode())

        graph = Graph(tree)

        if self.args.dump_source:
            print(graph.get_source())

        if self.args.preview:
            graph.dot.view()

        if self.args.outfile:
            graph.save(self.args.outfile.name)
            self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))

## Instruction:
Update py27 compatibility in print function

## Code After:
from __future__ import print_function

from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser

from pypeg2 import parse
from pypeg2.xmlast import thing2xml


class FlowGen(object):

    def __init__(self, args):
        self.args = parser.parse_args(args)

    def any_output(self):
        return any([self.args.dump_source, self.args.dump_xml])

    def safe_print(self, *args, **kwargs):
        if not self.any_output():
            print(*args, **kwargs)

    def run(self):
        data_input = self.args.infile.read()
        tree = parse(data_input, Code)

        if self.args.dump_xml:
            print(thing2xml(tree, pretty=True).decode())

        graph = Graph(tree)

        if self.args.dump_source:
            print(graph.get_source())

        if self.args.preview:
            graph.dot.view()

        if self.args.outfile:
            graph.save(self.args.outfile.name)
            self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
# ... existing code ...
from __future__ import print_function

from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
# ... rest of the code ...
759e22f8d629f76d7fca0d0567603c9ae6835fa6
api_v3/serializers/profile.py
api_v3/serializers/profile.py
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers

from api_v3.models import Profile, Ticket


class ProfileSerializer(serializers.ModelSerializer):

    tickets_count = fields.SerializerMethodField()

    class Meta:
        model = Profile
        read_only_fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'locale'
        )
        fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count'
        )

    def get_tickets_count(self, obj):
        if obj.is_superuser:
            return Ticket.objects.count()
        else:
            return Ticket.filter_by_user(obj).count()

    def to_representation(self, obj):
        request = self.context.get('request', None)
        data = super(ProfileSerializer, self).to_representation(obj)

        if request and request.user and request.user.is_superuser:
            return data

        # For regular users, make sure others email is not displayed
        if request and request.user != obj:
            data.pop('email')

        return data

    # Adds extra application related metas.
    def get_root_meta(self, resource, many):
        if not self.context.get('add_misc', None):
            return {}

        return {
            'member_centers': settings.MEMBER_CENTERS,
            'expense_scopes': settings.EXPENSE_SCOPES
        }
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers

from api_v3.models import Profile, Ticket


class ProfileSerializer(serializers.ModelSerializer):

    tickets_count = fields.SerializerMethodField()

    class Meta:
        model = Profile
        read_only_fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'locale'
        )
        fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count'
        )

    def get_tickets_count(self, obj):
        if obj.is_superuser:
            return Ticket.objects.count()
        else:
            return Ticket.filter_by_user(obj).count()

    def to_representation(self, obj):
        request = self.context.get('request', None)
        data = super(ProfileSerializer, self).to_representation(obj)

        if request and request.user and request.user.is_superuser:
            return data

        # For regular users, make sure others email is not displayed
        if request and request.user != obj:
            data.pop('email')

        return data

    # Adds extra application related metas.
    def get_root_meta(self, resource, many):
        if not self.context.get('add_misc', None):
            return {}

        return {
            'member_centers': sorted(settings.MEMBER_CENTERS),
            'expense_scopes': sorted(settings.EXPENSE_SCOPES)
        }
Return sorted member centers and expense scopes.
Return sorted member centers and expense scopes.
Python
mit
occrp/id-backend
python
## Code Before:
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers

from api_v3.models import Profile, Ticket


class ProfileSerializer(serializers.ModelSerializer):

    tickets_count = fields.SerializerMethodField()

    class Meta:
        model = Profile
        read_only_fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'locale'
        )
        fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count'
        )

    def get_tickets_count(self, obj):
        if obj.is_superuser:
            return Ticket.objects.count()
        else:
            return Ticket.filter_by_user(obj).count()

    def to_representation(self, obj):
        request = self.context.get('request', None)
        data = super(ProfileSerializer, self).to_representation(obj)

        if request and request.user and request.user.is_superuser:
            return data

        # For regular users, make sure others email is not displayed
        if request and request.user != obj:
            data.pop('email')

        return data

    # Adds extra application related metas.
    def get_root_meta(self, resource, many):
        if not self.context.get('add_misc', None):
            return {}

        return {
            'member_centers': settings.MEMBER_CENTERS,
            'expense_scopes': settings.EXPENSE_SCOPES
        }

## Instruction:
Return sorted member centers and expense scopes.

## Code After:
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers

from api_v3.models import Profile, Ticket


class ProfileSerializer(serializers.ModelSerializer):

    tickets_count = fields.SerializerMethodField()

    class Meta:
        model = Profile
        read_only_fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'locale'
        )
        fields = (
            'id', 'email', 'first_name', 'last_name',
            'is_staff', 'is_superuser', 'bio', 'locale', 'tickets_count'
        )

    def get_tickets_count(self, obj):
        if obj.is_superuser:
            return Ticket.objects.count()
        else:
            return Ticket.filter_by_user(obj).count()

    def to_representation(self, obj):
        request = self.context.get('request', None)
        data = super(ProfileSerializer, self).to_representation(obj)

        if request and request.user and request.user.is_superuser:
            return data

        # For regular users, make sure others email is not displayed
        if request and request.user != obj:
            data.pop('email')

        return data

    # Adds extra application related metas.
    def get_root_meta(self, resource, many):
        if not self.context.get('add_misc', None):
            return {}

        return {
            'member_centers': sorted(settings.MEMBER_CENTERS),
            'expense_scopes': sorted(settings.EXPENSE_SCOPES)
        }
# ... existing code ...
            return {}

        return {
            'member_centers': sorted(settings.MEMBER_CENTERS),
            'expense_scopes': sorted(settings.EXPENSE_SCOPES)
        }
# ... rest of the code ...
e2f9c0c0e8b96e44c5410c242d0609ef36b5ee4e
tests/test_ghostscript.py
tests/test_ghostscript.py
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(str(stderr), "")
        self.assertRegexpMatches(str(stdout), r'9\.\d\d')
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            universal_newlines=True,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(stderr, "")
        self.assertRegexpMatches(stdout, r'9\.\d\d')
Make Popen.communicate return output as strings not bytes.
Make Popen.communicate return output as strings not bytes.
Python
mit
YPlan/treepoem
python
## Code Before:
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(str(stderr), "")
        self.assertRegexpMatches(str(stdout), r'9\.\d\d')

## Instruction:
Make Popen.communicate return output as strings not bytes.

## Code After:
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            universal_newlines=True,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(stderr, "")
        self.assertRegexpMatches(stdout, r'9\.\d\d')
...
    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            universal_newlines=True,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
...
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(stderr, "")
        self.assertRegexpMatches(stdout, r'9\.\d\d')
...
f1acec57657a455642905103052b9c2b658f067f
src/main/java/com/google/sps/data/AggregationResponseEntry.java
src/main/java/com/google/sps/data/AggregationResponseEntry.java
package com.google.sps.data;

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.collections4.keyvalue.MultiKey;

/*
 * Class representing one row/entry in the aggregation response. A NULL field value
 * means the field was not being aggregated by, and will be omitted from the JSON response
 */
public final class AggregationResponseEntry {

  private String annotatedAssetId;
  private String annotatedLocation;
  private String annotatedUser;
  private final int count;
  private List<String> deviceIds;
  private List<String> serialNumbers;

  public AggregationResponseEntry(
      MultiKey key, List<ChromeOSDevice> devices, LinkedHashSet<AnnotatedField> fields) {
    String keys[] = (String[]) key.getKeys();
    int currKey = 0;
    Iterator<AnnotatedField> it = fields.iterator();
    while (it.hasNext()) {
      switch(it.next()) {
        case ASSET_ID:
          this.annotatedAssetId = keys[currKey++];
          break;
        case LOCATION:
          this.annotatedLocation = keys[currKey++];
          break;
        case USER:
          this.annotatedUser = keys[currKey++];
          break;
      }
    }
    this.count = devices.size();
    this.deviceIds = devices.stream()
        .map(device -> device.getDeviceId())
        .collect(Collectors.toList());
    this.serialNumbers = devices.stream()
        .map(device -> device.getSerialNumber())
        .collect(Collectors.toList());
  }
}
package com.google.sps.data;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import org.apache.commons.collections4.keyvalue.MultiKey;

/*
 * Class representing one row/entry in the aggregation response. A NULL field value
 * means the field was not being aggregated by, and will be omitted from the JSON response
 */
public final class AggregationResponseEntry {

  private String annotatedAssetId;
  private String annotatedLocation;
  private String annotatedUser;
  private final int count;
  private List<String> deviceIds;
  private List<String> serialNumbers;

  public AggregationResponseEntry(
      MultiKey key, List<ChromeOSDevice> devices, LinkedHashSet<AnnotatedField> fields) {
    String keys[] = (String[]) key.getKeys();
    int currKey = 0;
    Iterator<AnnotatedField> it = fields.iterator();
    while (it.hasNext()) {
      switch(it.next()) {
        case ASSET_ID:
          this.annotatedAssetId = keys[currKey++];
          break;
        case LOCATION:
          this.annotatedLocation = keys[currKey++];
          break;
        case USER:
          this.annotatedUser = keys[currKey++];
          break;
      }
    }
    this.count = devices.size();
    this.deviceIds = new ArrayList<String>();
    this.serialNumbers = new ArrayList<String>();
    for (ChromeOSDevice device : devices) {
      this.deviceIds.add(device.getDeviceId());
      this.serialNumbers.add(device.getSerialNumber());
    }
  }
}
Improve efficiency by merging two loops accessing the same data into one loop
Improve efficiency by merging two loops accessing the same data into one loop
Java
apache-2.0
googleinterns/step176-2020,googleinterns/step176-2020,googleinterns/step176-2020
java
## Code Before:
package com.google.sps.data;

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.collections4.keyvalue.MultiKey;

/*
 * Class representing one row/entry in the aggregation response. A NULL field value
 * means the field was not being aggregated by, and will be omitted from the JSON response
 */
public final class AggregationResponseEntry {

  private String annotatedAssetId;
  private String annotatedLocation;
  private String annotatedUser;
  private final int count;
  private List<String> deviceIds;
  private List<String> serialNumbers;

  public AggregationResponseEntry(
      MultiKey key, List<ChromeOSDevice> devices, LinkedHashSet<AnnotatedField> fields) {
    String keys[] = (String[]) key.getKeys();
    int currKey = 0;
    Iterator<AnnotatedField> it = fields.iterator();
    while (it.hasNext()) {
      switch(it.next()) {
        case ASSET_ID:
          this.annotatedAssetId = keys[currKey++];
          break;
        case LOCATION:
          this.annotatedLocation = keys[currKey++];
          break;
        case USER:
          this.annotatedUser = keys[currKey++];
          break;
      }
    }
    this.count = devices.size();
    this.deviceIds = devices.stream()
        .map(device -> device.getDeviceId())
        .collect(Collectors.toList());
    this.serialNumbers = devices.stream()
        .map(device -> device.getSerialNumber())
        .collect(Collectors.toList());
  }
}

## Instruction:
Improve efficiency by merging two loops accessing the same data into one loop

## Code After:
package com.google.sps.data;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import org.apache.commons.collections4.keyvalue.MultiKey;

/*
 * Class representing one row/entry in the aggregation response. A NULL field value
 * means the field was not being aggregated by, and will be omitted from the JSON response
 */
public final class AggregationResponseEntry {

  private String annotatedAssetId;
  private String annotatedLocation;
  private String annotatedUser;
  private final int count;
  private List<String> deviceIds;
  private List<String> serialNumbers;

  public AggregationResponseEntry(
      MultiKey key, List<ChromeOSDevice> devices, LinkedHashSet<AnnotatedField> fields) {
    String keys[] = (String[]) key.getKeys();
    int currKey = 0;
    Iterator<AnnotatedField> it = fields.iterator();
    while (it.hasNext()) {
      switch(it.next()) {
        case ASSET_ID:
          this.annotatedAssetId = keys[currKey++];
          break;
        case LOCATION:
          this.annotatedLocation = keys[currKey++];
          break;
        case USER:
          this.annotatedUser = keys[currKey++];
          break;
      }
    }
    this.count = devices.size();
    this.deviceIds = new ArrayList<String>();
    this.serialNumbers = new ArrayList<String>();
    for (ChromeOSDevice device : devices) {
      this.deviceIds.add(device.getDeviceId());
      this.serialNumbers.add(device.getSerialNumber());
    }
  }
}
# ... existing code ... package com.google.sps.data; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import org.apache.commons.collections4.keyvalue.MultiKey; /* # ... modified code ... } this.count = devices.size(); this.deviceIds = new ArrayList<String>(); this.serialNumbers = new ArrayList<String>(); for (ChromeOSDevice device : devices) { this.deviceIds.add(device.getDeviceId()); this.serialNumbers.add(device.getSerialNumber()); } } } # ... rest of the code ...
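The change in the record above fuses two stream pipelines over the same device list into a single loop. As a stand-alone illustration of that loop-fusion idea (written as a small Python sketch rather than the project's Java; the names and sample data are invented for the example):

```python
# Two traversals of the same list...
devices = [("id-1", "sn-1"), ("id-2", "sn-2"), ("id-3", "sn-3")]  # made-up sample data
device_ids = [d[0] for d in devices]
serial_numbers = [d[1] for d in devices]

# ...versus one fused traversal, the shape the commit moves to.
device_ids, serial_numbers = [], []
for device_id, serial_number in devices:
    device_ids.append(device_id)
    serial_numbers.append(serial_number)

print(device_ids)        # ['id-1', 'id-2', 'id-3']
print(serial_numbers)    # ['sn-1', 'sn-2', 'sn-3']
```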
e37ba4810d2fa759720b0e8047fe466f9e7fb294
src/main/java/com/gpm/calameo/SingleDrm.java
src/main/java/com/gpm/calameo/SingleDrm.java
/** * Copyright 2013 Mat Booth <[email protected]> */ package com.gpm.calameo; public class SingleDrm { private String ID; private String SubscriptionID; private String SubscriberLogin; private Publication Book; private String Extras; public SingleDrm() { } public String getId() { return ID; } public String getSubscriptionId() { return SubscriptionID; } public String getSubscriberLogin() { return SubscriberLogin; } public Publication getBook() { return Book; } public String getExtras() { return Extras; } @Override public String toString() { return String.format("{ ID: %s, Subscription ID: %s, Subscriber Login: %s, Book: %s, Extras: %s }", getId(), getSubscriptionId(), getSubscriberLogin(), getBook(), getExtras()); } }
/* * Copyright 2013 Mat Booth <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gpm.calameo; public class SingleDrm { private String ID; private String SubscriptionID; private String SubscriberLogin; private Publication Book; private String Extras; public SingleDrm() { } public String getId() { return ID; } public String getSubscriptionId() { return SubscriptionID; } public String getSubscriberLogin() { return SubscriberLogin; } public Publication getBook() { return Book; } public String getExtras() { return Extras; } @Override public String toString() { return String.format("{ ID: %s, Subscription ID: %s, Subscriber Login: %s, Book: %s, Extras: %s }", getId(), getSubscriptionId(), getSubscriberLogin(), getBook(), getExtras()); } }
Fix licence header in newly added file.
Fix licence header in newly added file.
Java
apache-2.0
mbooth101/calameo-api
java
## Code Before: /** * Copyright 2013 Mat Booth <[email protected]> */ package com.gpm.calameo; public class SingleDrm { private String ID; private String SubscriptionID; private String SubscriberLogin; private Publication Book; private String Extras; public SingleDrm() { } public String getId() { return ID; } public String getSubscriptionId() { return SubscriptionID; } public String getSubscriberLogin() { return SubscriberLogin; } public Publication getBook() { return Book; } public String getExtras() { return Extras; } @Override public String toString() { return String.format("{ ID: %s, Subscription ID: %s, Subscriber Login: %s, Book: %s, Extras: %s }", getId(), getSubscriptionId(), getSubscriberLogin(), getBook(), getExtras()); } } ## Instruction: Fix licence header in newly added file. ## Code After: /* * Copyright 2013 Mat Booth <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gpm.calameo; public class SingleDrm { private String ID; private String SubscriptionID; private String SubscriberLogin; private Publication Book; private String Extras; public SingleDrm() { } public String getId() { return ID; } public String getSubscriptionId() { return SubscriptionID; } public String getSubscriberLogin() { return SubscriberLogin; } public Publication getBook() { return Book; } public String getExtras() { return Extras; } @Override public String toString() { return String.format("{ ID: %s, Subscription ID: %s, Subscriber Login: %s, Book: %s, Extras: %s }", getId(), getSubscriptionId(), getSubscriberLogin(), getBook(), getExtras()); } }
// ... existing code ... /* * Copyright 2013 Mat Booth <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gpm.calameo; // ... rest of the code ...
af6a25f0e1ec0265c267e6ee4513925eaba6d0ed
arch/x86/include/asm/mmu.h
arch/x86/include/asm/mmu.h
/* * The x86 doesn't have a mmu context, but * we put the segment information here. */ typedef struct { void *ldt; int size; struct mutex lock; void *vdso; #ifdef CONFIG_X86_64 /* True if mm supports a task running in 32 bit compatibility mode. */ unsigned short ia32_compat; #endif } mm_context_t; #ifdef CONFIG_SMP void leave_mm(int cpu); #else static inline void leave_mm(int cpu) { } #endif #endif /* _ASM_X86_MMU_H */
/* * The x86 doesn't have a mmu context, but * we put the segment information here. */ typedef struct { void *ldt; int size; #ifdef CONFIG_X86_64 /* True if mm supports a task running in 32 bit compatibility mode. */ unsigned short ia32_compat; #endif struct mutex lock; void *vdso; } mm_context_t; #ifdef CONFIG_SMP void leave_mm(int cpu); #else static inline void leave_mm(int cpu) { } #endif #endif /* _ASM_X86_MMU_H */
Reorder mm_context_t to remove x86_64 alignment padding and thus shrink mm_struct
x86: Reorder mm_context_t to remove x86_64 alignment padding and thus shrink mm_struct Reorder mm_context_t to remove alignment padding on 64 bit builds shrinking its size from 64 to 56 bytes. This allows mm_struct to shrink from 840 to 832 bytes, so using one fewer cache lines, and getting more objects per slab when using slub. slabinfo mm_struct reports before :- Sizes (bytes) Slabs ----------------------------------- Object : 840 Total : 7 SlabObj: 896 Full : 1 SlabSiz: 16384 Partial: 4 Loss : 56 CpuSlab: 2 Align : 64 Objects: 18 after :- Sizes (bytes) Slabs ---------------------------------- Object : 832 Total : 7 SlabObj: 832 Full : 1 SlabSiz: 16384 Partial: 4 Loss : 0 CpuSlab: 2 Align : 64 Objects: 19 Signed-off-by: Richard Kennedy <[email protected]> Cc: [email protected] Cc: Linus Torvalds <[email protected]> Cc: Andrew Morton <[email protected]> Cc: Pekka Enberg <[email protected]> Link: [email protected] Signed-off-by: Ingo Molnar <[email protected]>
C
apache-2.0
TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas
c
## Code Before: /* * The x86 doesn't have a mmu context, but * we put the segment information here. */ typedef struct { void *ldt; int size; struct mutex lock; void *vdso; #ifdef CONFIG_X86_64 /* True if mm supports a task running in 32 bit compatibility mode. */ unsigned short ia32_compat; #endif } mm_context_t; #ifdef CONFIG_SMP void leave_mm(int cpu); #else static inline void leave_mm(int cpu) { } #endif #endif /* _ASM_X86_MMU_H */ ## Instruction: x86: Reorder mm_context_t to remove x86_64 alignment padding and thus shrink mm_struct Reorder mm_context_t to remove alignment padding on 64 bit builds shrinking its size from 64 to 56 bytes. This allows mm_struct to shrink from 840 to 832 bytes, so using one fewer cache lines, and getting more objects per slab when using slub. slabinfo mm_struct reports before :- Sizes (bytes) Slabs ----------------------------------- Object : 840 Total : 7 SlabObj: 896 Full : 1 SlabSiz: 16384 Partial: 4 Loss : 56 CpuSlab: 2 Align : 64 Objects: 18 after :- Sizes (bytes) Slabs ---------------------------------- Object : 832 Total : 7 SlabObj: 832 Full : 1 SlabSiz: 16384 Partial: 4 Loss : 0 CpuSlab: 2 Align : 64 Objects: 19 Signed-off-by: Richard Kennedy <[email protected]> Cc: [email protected] Cc: Linus Torvalds <[email protected]> Cc: Andrew Morton <[email protected]> Cc: Pekka Enberg <[email protected]> Link: [email protected] Signed-off-by: Ingo Molnar <[email protected]> ## Code After: /* * The x86 doesn't have a mmu context, but * we put the segment information here. */ typedef struct { void *ldt; int size; #ifdef CONFIG_X86_64 /* True if mm supports a task running in 32 bit compatibility mode. */ unsigned short ia32_compat; #endif struct mutex lock; void *vdso; } mm_context_t; #ifdef CONFIG_SMP void leave_mm(int cpu); #else static inline void leave_mm(int cpu) { } #endif #endif /* _ASM_X86_MMU_H */
... typedef struct { void *ldt; int size; #ifdef CONFIG_X86_64 /* True if mm supports a task running in 32 bit compatibility mode. */ ... unsigned short ia32_compat; #endif struct mutex lock; void *vdso; } mm_context_t; #ifdef CONFIG_SMP ...
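The saving described in the commit message above comes from member ordering alone: placing the 2-byte ia32_compat right after the 4-byte size fills an existing alignment hole instead of adding tail padding after the last member. Below is a rough stand-alone model of that effect using Python's ctypes, which lays structures out with the platform's native C alignment. The field set is simplified (for instance, a single pointer stands in for struct mutex), so the absolute sizes are not the kernel's; only the before/after difference is the point.

```python
import ctypes

class Before(ctypes.Structure):
    # Simplified stand-in for the old layout, not the kernel definition.
    _fields_ = [("ldt", ctypes.c_void_p),
                ("size", ctypes.c_int),
                ("lock", ctypes.c_void_p),          # placeholder for struct mutex
                ("vdso", ctypes.c_void_p),
                ("ia32_compat", ctypes.c_ushort)]   # 2 bytes plus tail padding

class After(ctypes.Structure):
    # Same members, with the short moved into the hole after 'size'.
    _fields_ = [("ldt", ctypes.c_void_p),
                ("size", ctypes.c_int),
                ("ia32_compat", ctypes.c_ushort),
                ("lock", ctypes.c_void_p),
                ("vdso", ctypes.c_void_p)]

# On a typical 64-bit ABI this prints 40 and 32: identical members, smaller struct.
print(ctypes.sizeof(Before), ctypes.sizeof(After))
```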
3af6ec2f9cf9bf41d738fd4999283c6d368712ff
Dendrite/src/com/deuteriumlabs/dendrite/unittest/PageIdTest.java
Dendrite/src/com/deuteriumlabs/dendrite/unittest/PageIdTest.java
package com.deuteriumlabs.dendrite.unittest; import static org.junit.Assert.*; import org.junit.Test; public class PageIdTest { @Test public void test() { fail("Not yet implemented"); } }
package com.deuteriumlabs.dendrite.unittest; import static org.junit.Assert.assertNotNull; import org.junit.Test; import com.deuteriumlabs.dendrite.model.PageId; public class PageIdTest { @Test public void testDefaultConstructor() { final PageId pageId = new PageId(); assertNotNull(pageId); } }
Add test for default constructor
Add test for default constructor
Java
mit
MattHeard/Dendrite,MattHeard/Dendrite,MattHeard/Dendrite
java
## Code Before: package com.deuteriumlabs.dendrite.unittest; import static org.junit.Assert.*; import org.junit.Test; public class PageIdTest { @Test public void test() { fail("Not yet implemented"); } } ## Instruction: Add test for default constructor ## Code After: package com.deuteriumlabs.dendrite.unittest; import static org.junit.Assert.assertNotNull; import org.junit.Test; import com.deuteriumlabs.dendrite.model.PageId; public class PageIdTest { @Test public void testDefaultConstructor() { final PageId pageId = new PageId(); assertNotNull(pageId); } }
# ... existing code ... package com.deuteriumlabs.dendrite.unittest; import static org.junit.Assert.assertNotNull; import org.junit.Test; import com.deuteriumlabs.dendrite.model.PageId; public class PageIdTest { @Test public void testDefaultConstructor() { final PageId pageId = new PageId(); assertNotNull(pageId); } } # ... rest of the code ...
59bdf3787f77685161c291697f52c3b12ec57123
jkind-api/src/jkind/api/examples/Kind2WebExample.java
jkind-api/src/jkind/api/examples/Kind2WebExample.java
package jkind.api.examples; import java.io.FileReader; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.List; import jkind.api.xml.Kind2WebInputStream; import jkind.api.xml.LineInputStream; public class Kind2WebExample { public static void main(String[] args) throws Exception { if (args.length != 2) { System.err.println("Usage: Kind2WebExample [url] [filename]"); return; } URI uri = new URI(args[0]); String filename = args[1]; try (LineInputStream lines = new LineInputStream(new Kind2WebInputStream(uri, getKindArgs(), getLustre(filename)))) { String line; while ((line = lines.readLine()) != null) { System.out.print(line); } } } private static List<String> getKindArgs() { List<String> args = new ArrayList<>(); args.add("-xml"); return args; } private static String getLustre(String filename) throws IOException { try (FileReader reader = new FileReader(filename)) { StringBuilder result = new StringBuilder(); int i; while ((i = reader.read()) != -1) { result.append((char) i); } return result.toString(); } } }
package jkind.api.examples; import java.io.FileReader; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.List; import jkind.api.xml.Kind2WebInputStream; import jkind.api.xml.LineInputStream; public class Kind2WebExample { public static void main(String[] args) throws Exception { if (args.length != 2) { System.err.println("Usage: Kind2WebExample [url] [filename]"); return; } URI uri = new URI(args[0]); String filename = args[1]; try (LineInputStream lines = new LineInputStream(new Kind2WebInputStream(uri, getKindArgs(), getLustre(filename)))) { String line; while ((line = lines.readLine()) != null) { System.out.print(line); } } } private static List<String> getKindArgs() { List<String> args = new ArrayList<>(); args.add("-xml"); args.add("--timeout_wall"); args.add("100"); return args; } private static String getLustre(String filename) throws IOException { try (FileReader reader = new FileReader(filename)) { StringBuilder result = new StringBuilder(); int i; while ((i = reader.read()) != -1) { result.append((char) i); } return result.toString(); } } }
Add default time out to Kind 2 web example
Add default time out to Kind 2 web example
Java
bsd-3-clause
backesj/jkind,andrewkatis/jkind-1,lgwagner/jkind,agacek/jkind,backesj/jkind,andrewkatis/jkind-1,agacek/jkind,lgwagner/jkind
java
## Code Before: package jkind.api.examples; import java.io.FileReader; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.List; import jkind.api.xml.Kind2WebInputStream; import jkind.api.xml.LineInputStream; public class Kind2WebExample { public static void main(String[] args) throws Exception { if (args.length != 2) { System.err.println("Usage: Kind2WebExample [url] [filename]"); return; } URI uri = new URI(args[0]); String filename = args[1]; try (LineInputStream lines = new LineInputStream(new Kind2WebInputStream(uri, getKindArgs(), getLustre(filename)))) { String line; while ((line = lines.readLine()) != null) { System.out.print(line); } } } private static List<String> getKindArgs() { List<String> args = new ArrayList<>(); args.add("-xml"); return args; } private static String getLustre(String filename) throws IOException { try (FileReader reader = new FileReader(filename)) { StringBuilder result = new StringBuilder(); int i; while ((i = reader.read()) != -1) { result.append((char) i); } return result.toString(); } } } ## Instruction: Add default time out to Kind 2 web example ## Code After: package jkind.api.examples; import java.io.FileReader; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.List; import jkind.api.xml.Kind2WebInputStream; import jkind.api.xml.LineInputStream; public class Kind2WebExample { public static void main(String[] args) throws Exception { if (args.length != 2) { System.err.println("Usage: Kind2WebExample [url] [filename]"); return; } URI uri = new URI(args[0]); String filename = args[1]; try (LineInputStream lines = new LineInputStream(new Kind2WebInputStream(uri, getKindArgs(), getLustre(filename)))) { String line; while ((line = lines.readLine()) != null) { System.out.print(line); } } } private static List<String> getKindArgs() { List<String> args = new ArrayList<>(); args.add("-xml"); args.add("--timeout_wall"); args.add("100"); return args; } private static String getLustre(String filename) throws IOException { try (FileReader reader = new FileReader(filename)) { StringBuilder result = new StringBuilder(); int i; while ((i = reader.read()) != -1) { result.append((char) i); } return result.toString(); } } }
... private static List<String> getKindArgs() { List<String> args = new ArrayList<>(); args.add("-xml"); args.add("--timeout_wall"); args.add("100"); return args; } ...
054c283d1cdccdf8277acc96435672480587f6b9
devicehive/api_subscribe_request.py
devicehive/api_subscribe_request.py
class ApiSubscribeRequest(object): """Api request class.""" def __init__(self): self._action = None self._request = {} self._params = {'method': 'GET', 'url': None, 'params': {}, 'headers': {}, 'response_key': None} def action(self, action): self._action = action def set(self, key, value): if not value: return self._request[key] = value def method(self, method): self._params['method'] = method def url(self, url, **args): for key in args: value = args[key] url = url.replace('{%s}' % key, str(value)) self._params['url'] = url def param(self, key, value): if not value: return self._params['params'][key] = value def header(self, name, value): self._params['headers'][name] = value def response_key(self, response_key): self._params['response_key'] = response_key def extract(self): return self._action, self._request, self._params
class ApiSubscribeRequest(object): """Api request class.""" def __init__(self): self._action = None self._request = {} self._params = {'method': 'GET', 'url': None, 'params': {}, 'headers': {}, 'response_key': None, 'params_timestamp_key': 'timestamp', 'response_timestamp_key': 'timestamp'} def action(self, action): self._action = action def set(self, key, value): if not value: return self._request[key] = value def method(self, method): self._params['method'] = method def url(self, url, **args): for key in args: value = args[key] url = url.replace('{%s}' % key, str(value)) self._params['url'] = url def param(self, key, value): if not value: return self._params['params'][key] = value def header(self, name, value): self._params['headers'][name] = value def response_key(self, response_key): self._params['response_key'] = response_key def params_timestamp_key(self, params_timestamp_key): self._params['params_timestamp_key'] = params_timestamp_key def response_timestamp_key(self, response_timestamp_key): self._params['response_timestamp_key'] = response_timestamp_key def extract(self): return self._action, self._request, self._params
Add params_timestamp_key and response_timestamp_key methods
Add params_timestamp_key and response_timestamp_key methods
Python
apache-2.0
devicehive/devicehive-python
python
## Code Before: class ApiSubscribeRequest(object): """Api request class.""" def __init__(self): self._action = None self._request = {} self._params = {'method': 'GET', 'url': None, 'params': {}, 'headers': {}, 'response_key': None} def action(self, action): self._action = action def set(self, key, value): if not value: return self._request[key] = value def method(self, method): self._params['method'] = method def url(self, url, **args): for key in args: value = args[key] url = url.replace('{%s}' % key, str(value)) self._params['url'] = url def param(self, key, value): if not value: return self._params['params'][key] = value def header(self, name, value): self._params['headers'][name] = value def response_key(self, response_key): self._params['response_key'] = response_key def extract(self): return self._action, self._request, self._params ## Instruction: Add params_timestamp_key and response_timestamp_key methods ## Code After: class ApiSubscribeRequest(object): """Api request class.""" def __init__(self): self._action = None self._request = {} self._params = {'method': 'GET', 'url': None, 'params': {}, 'headers': {}, 'response_key': None, 'params_timestamp_key': 'timestamp', 'response_timestamp_key': 'timestamp'} def action(self, action): self._action = action def set(self, key, value): if not value: return self._request[key] = value def method(self, method): self._params['method'] = method def url(self, url, **args): for key in args: value = args[key] url = url.replace('{%s}' % key, str(value)) self._params['url'] = url def param(self, key, value): if not value: return self._params['params'][key] = value def header(self, name, value): self._params['headers'][name] = value def response_key(self, response_key): self._params['response_key'] = response_key def params_timestamp_key(self, params_timestamp_key): self._params['params_timestamp_key'] = params_timestamp_key def response_timestamp_key(self, response_timestamp_key): self._params['response_timestamp_key'] = response_timestamp_key def extract(self): return self._action, self._request, self._params
... 'url': None, 'params': {}, 'headers': {}, 'response_key': None, 'params_timestamp_key': 'timestamp', 'response_timestamp_key': 'timestamp'} def action(self, action): self._action = action ... def response_key(self, response_key): self._params['response_key'] = response_key def params_timestamp_key(self, params_timestamp_key): self._params['params_timestamp_key'] = params_timestamp_key def response_timestamp_key(self, response_timestamp_key): self._params['response_timestamp_key'] = response_timestamp_key def extract(self): return self._action, self._request, self._params ...
5cbfdaeb02fcf303e0b8d00b0ba4a91ba8320f53
wayland/input_device.h
wayland/input_device.h
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef OZONE_WAYLAND_INPUT_DEVICE_H_ #define OZONE_WAYLAND_INPUT_DEVICE_H_ #include <wayland-client.h> #include "base/basictypes.h" namespace ozonewayland { class WaylandKeyboard; class WaylandPointer; class WaylandDisplay; class WaylandWindow; class WaylandInputDevice { public: WaylandInputDevice(WaylandDisplay* display, uint32_t id); ~WaylandInputDevice(); wl_seat* GetInputSeat() { return input_seat_; } WaylandKeyboard* GetKeyBoard() const { return input_keyboard_; } WaylandPointer* GetPointer() const { return input_pointer_; } unsigned GetFocusWindowHandle() { return focused_window_handle_; } void SetFocusWindowHandle(unsigned windowhandle); private: static void OnSeatCapabilities(void *data, wl_seat *seat, uint32_t caps); // Keeps track of current focused window. unsigned focused_window_handle_; wl_seat* input_seat_; WaylandKeyboard* input_keyboard_; WaylandPointer* input_pointer_; DISALLOW_COPY_AND_ASSIGN(WaylandInputDevice); }; } // namespace ozonewayland #endif // OZONE_WAYLAND_INPUT_DEVICE_H_
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef OZONE_WAYLAND_INPUT_DEVICE_H_ #define OZONE_WAYLAND_INPUT_DEVICE_H_ #include <wayland-client.h> #include "base/basictypes.h" namespace ozonewayland { class WaylandKeyboard; class WaylandPointer; class WaylandDisplay; class WaylandWindow; class WaylandInputDevice { public: WaylandInputDevice(WaylandDisplay* display, uint32_t id); ~WaylandInputDevice(); wl_seat* GetInputSeat() const { return input_seat_; } WaylandKeyboard* GetKeyBoard() const { return input_keyboard_; } WaylandPointer* GetPointer() const { return input_pointer_; } unsigned GetFocusWindowHandle() const { return focused_window_handle_; } void SetFocusWindowHandle(unsigned windowhandle); private: static void OnSeatCapabilities(void *data, wl_seat *seat, uint32_t caps); // Keeps track of current focused window. unsigned focused_window_handle_; wl_seat* input_seat_; WaylandKeyboard* input_keyboard_; WaylandPointer* input_pointer_; DISALLOW_COPY_AND_ASSIGN(WaylandInputDevice); }; } // namespace ozonewayland #endif // OZONE_WAYLAND_INPUT_DEVICE_H_
Make read only functions as const.
Inputs: Make read only functions as const. Adds missing const keyword for Read only functions in WaylandInputDevice.
C
bsd-3-clause
01org/ozone-wayland,hongzhang-yan/ozone-wayland,rakuco/ozone-wayland,baillaw/ozone-wayland,qjia7/ozone-wayland,joone/ozone-wayland,nagineni/ozone-wayland,hongzhang-yan/ozone-wayland,kishansheshagiri/ozone-wayland,darktears/ozone-wayland,kuscsik/ozone-wayland,siteshwar/ozone-wayland,Tarnyko/ozone-wayland,Tarnyko/ozone-wayland,siteshwar/ozone-wayland,01org/ozone-wayland,kishansheshagiri/ozone-wayland,shaochangbin/ozone-wayland,hongzhang-yan/ozone-wayland,darktears/ozone-wayland,sjnewbury/ozone-wayland,clopez/ozone-wayland,joone/ozone-wayland,siteshwar/ozone-wayland,nicoguyo/ozone-wayland,01org/ozone-wayland,clopez/ozone-wayland,speedpat/ozone-wayland,mrunalk/ozone-wayland,shaochangbin/ozone-wayland,racarr-ubuntu/ozone-mir,tiagovignatti/ozone-wayland,joone/ozone-wayland,darktears/ozone-wayland,nicoguyo/ozone-wayland,ds-hwang/ozone-wayland,likewise/ozone-wayland,joone/ozone-wayland,AndriyP/ozone-wayland,hongzhang-yan/ozone-wayland,speedpat/ozone-wayland,shaochangbin/ozone-wayland,kuscsik/ozone-wayland,nagineni/ozone-wayland,baillaw/ozone-wayland,sjnewbury/ozone-wayland,kuscsik/ozone-wayland,kalyankondapally/ozone-wayland,nagineni/ozone-wayland,AndriyP/ozone-wayland,rakuco/ozone-wayland,AndriyP/ozone-wayland,kishansheshagiri/ozone-wayland,AndriyP/ozone-wayland,nicoguyo/ozone-wayland,baillaw/ozone-wayland,racarr-ubuntu/ozone-mir,nicoguyo/ozone-wayland,kuscsik/ozone-wayland,darktears/ozone-wayland,qjia7/ozone-wayland,rakuco/ozone-wayland,01org/ozone-wayland,tiagovignatti/ozone-wayland,ds-hwang/ozone-wayland,mrunalk/ozone-wayland,baillaw/ozone-wayland,speedpat/ozone-wayland,mrunalk/ozone-wayland,speedpat/ozone-wayland,kalyankondapally/ozone-wayland,Tarnyko/ozone-wayland,nagineni/ozone-wayland,likewise/ozone-wayland,qjia7/ozone-wayland,mrunalk/ozone-wayland,likewise/ozone-wayland,kalyankondapally/ozone-wayland,shaochangbin/ozone-wayland,sjnewbury/ozone-wayland,likewise/ozone-wayland,siteshwar/ozone-wayland,rakuco/ozone-wayland,ds-hwang/ozone-wayland,tiagovignatti/ozone-wayland,kalyankondapally/ozone-wayland,racarr-ubuntu/ozone-mir,kishansheshagiri/ozone-wayland,Tarnyko/ozone-wayland,qjia7/ozone-wayland,clopez/ozone-wayland,sjnewbury/ozone-wayland,tiagovignatti/ozone-wayland,racarr-ubuntu/ozone-mir,clopez/ozone-wayland,ds-hwang/ozone-wayland
c
## Code Before: // Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef OZONE_WAYLAND_INPUT_DEVICE_H_ #define OZONE_WAYLAND_INPUT_DEVICE_H_ #include <wayland-client.h> #include "base/basictypes.h" namespace ozonewayland { class WaylandKeyboard; class WaylandPointer; class WaylandDisplay; class WaylandWindow; class WaylandInputDevice { public: WaylandInputDevice(WaylandDisplay* display, uint32_t id); ~WaylandInputDevice(); wl_seat* GetInputSeat() { return input_seat_; } WaylandKeyboard* GetKeyBoard() const { return input_keyboard_; } WaylandPointer* GetPointer() const { return input_pointer_; } unsigned GetFocusWindowHandle() { return focused_window_handle_; } void SetFocusWindowHandle(unsigned windowhandle); private: static void OnSeatCapabilities(void *data, wl_seat *seat, uint32_t caps); // Keeps track of current focused window. unsigned focused_window_handle_; wl_seat* input_seat_; WaylandKeyboard* input_keyboard_; WaylandPointer* input_pointer_; DISALLOW_COPY_AND_ASSIGN(WaylandInputDevice); }; } // namespace ozonewayland #endif // OZONE_WAYLAND_INPUT_DEVICE_H_ ## Instruction: Inputs: Make read only functions as const. Adds missing const keyword for Read only functions in WaylandInputDevice. ## Code After: // Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef OZONE_WAYLAND_INPUT_DEVICE_H_ #define OZONE_WAYLAND_INPUT_DEVICE_H_ #include <wayland-client.h> #include "base/basictypes.h" namespace ozonewayland { class WaylandKeyboard; class WaylandPointer; class WaylandDisplay; class WaylandWindow; class WaylandInputDevice { public: WaylandInputDevice(WaylandDisplay* display, uint32_t id); ~WaylandInputDevice(); wl_seat* GetInputSeat() const { return input_seat_; } WaylandKeyboard* GetKeyBoard() const { return input_keyboard_; } WaylandPointer* GetPointer() const { return input_pointer_; } unsigned GetFocusWindowHandle() const { return focused_window_handle_; } void SetFocusWindowHandle(unsigned windowhandle); private: static void OnSeatCapabilities(void *data, wl_seat *seat, uint32_t caps); // Keeps track of current focused window. unsigned focused_window_handle_; wl_seat* input_seat_; WaylandKeyboard* input_keyboard_; WaylandPointer* input_pointer_; DISALLOW_COPY_AND_ASSIGN(WaylandInputDevice); }; } // namespace ozonewayland #endif // OZONE_WAYLAND_INPUT_DEVICE_H_
... WaylandInputDevice(WaylandDisplay* display, uint32_t id); ~WaylandInputDevice(); wl_seat* GetInputSeat() const { return input_seat_; } WaylandKeyboard* GetKeyBoard() const { return input_keyboard_; } WaylandPointer* GetPointer() const { return input_pointer_; } unsigned GetFocusWindowHandle() const { return focused_window_handle_; } void SetFocusWindowHandle(unsigned windowhandle); private: ...
549f82e4211b0e6af7884b9f8d83c636906cd845
src/java/blockmap/test/com/ibm/research/blockmap/CapiTestSupport.java
src/java/blockmap/test/com/ibm/research/blockmap/CapiTestSupport.java
/* IBM_PROLOG_BEGIN_TAG * This is an automatically generated prolog. * * $Source: src/java/test/blockmap/com/ibm/research/blockmap/CapiTestSupport.java $ * * IBM Data Engine for NoSQL - Power Systems Edition User Library Project * * Contributors Listed Below - COPYRIGHT 2015,2016,2017 * [+] International Business Machines Corp. * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * IBM_PROLOG_END_TAG */ package com.ibm.research.blockmap; import org.junit.Ignore; @Ignore class CapiTestSupport { static final String DEVICE_PATH = "amd64".equals(System.getProperty("os.arch")) ? null : "/dev/sdf"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //static final String DEVICE_PATH="/dev/hdisk0"; }
/* IBM_PROLOG_BEGIN_TAG * This is an automatically generated prolog. * * $Source: src/java/test/blockmap/com/ibm/research/blockmap/CapiTestSupport.java $ * * IBM Data Engine for NoSQL - Power Systems Edition User Library Project * * Contributors Listed Below - COPYRIGHT 2015,2016,2017 * [+] International Business Machines Corp. * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * IBM_PROLOG_END_TAG */ package com.ibm.research.blockmap; import org.junit.Ignore; @Ignore class CapiTestSupport { static final String DEVICE_PATH = "amd64".equals(System.getProperty("os.arch")) ? null : System.getProperty("CAPI_DEVICE_PATH", "/dev/sdf"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //static final String DEVICE_PATH="/dev/hdisk0"; }
Use the CAPI_DEVICE_PATH property in blockmap tests
Use the CAPI_DEVICE_PATH property in blockmap tests
Java
apache-2.0
rellermeyer/capiflash,odaira/capiflash,rellermeyer/capiflash,odaira/capiflash,odaira/capiflash,odaira/capiflash,odaira/capiflash,rellermeyer/capiflash,rellermeyer/capiflash,rellermeyer/capiflash
java
## Code Before: /* IBM_PROLOG_BEGIN_TAG * This is an automatically generated prolog. * * $Source: src/java/test/blockmap/com/ibm/research/blockmap/CapiTestSupport.java $ * * IBM Data Engine for NoSQL - Power Systems Edition User Library Project * * Contributors Listed Below - COPYRIGHT 2015,2016,2017 * [+] International Business Machines Corp. * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * IBM_PROLOG_END_TAG */ package com.ibm.research.blockmap; import org.junit.Ignore; @Ignore class CapiTestSupport { static final String DEVICE_PATH = "amd64".equals(System.getProperty("os.arch")) ? null : "/dev/sdf"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //static final String DEVICE_PATH="/dev/hdisk0"; } ## Instruction: Use the CAPI_DEVICE_PATH property in blockmap tests ## Code After: /* IBM_PROLOG_BEGIN_TAG * This is an automatically generated prolog. * * $Source: src/java/test/blockmap/com/ibm/research/blockmap/CapiTestSupport.java $ * * IBM Data Engine for NoSQL - Power Systems Edition User Library Project * * Contributors Listed Below - COPYRIGHT 2015,2016,2017 * [+] International Business Machines Corp. * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * IBM_PROLOG_END_TAG */ package com.ibm.research.blockmap; import org.junit.Ignore; @Ignore class CapiTestSupport { static final String DEVICE_PATH = "amd64".equals(System.getProperty("os.arch")) ? null : System.getProperty("CAPI_DEVICE_PATH", "/dev/sdf"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //static final String DEVICE_PATH="/dev/hdisk0"; }
... @Ignore class CapiTestSupport { static final String DEVICE_PATH = "amd64".equals(System.getProperty("os.arch")) ? null : System.getProperty("CAPI_DEVICE_PATH", "/dev/sdf"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //static final String DEVICE_PATH="/dev/hdisk0"; } ...
d81b0e3ebc01433e973efd01953cde5101159e3b
src/main/java/id/ac/itb/sigit/pengenalanpola/CharDef.java
src/main/java/id/ac/itb/sigit/pengenalanpola/CharDef.java
package id.ac.itb.sigit.pengenalanpola; import java.util.ArrayList; import java.util.List; /** * Created by Sigit on 22/09/2015. */ public class CharDef { private String character; private String chainCode; private List<String> subChainCode=new ArrayList<>(); public String getCharacter() { return character; } public void setCharacter(String character) { this.character = character; } public String getChainCode() { return chainCode; } public void setChainCode(String chainCode) { this.chainCode = chainCode; } public List<String> getSubChainCode() { return subChainCode; } }
package id.ac.itb.sigit.pengenalanpola; import java.util.ArrayList; import java.util.List; /** * Created by Sigit on 22/09/2015. */ public class CharDef { private String character; private String chainCode; private String dirChainCode; private String relChainCode; private List<String> subChainCode=new ArrayList<>(); public String getCharacter() { return character; } public void setCharacter(String character) { this.character = character; } public String getChainCode() { return chainCode; } public void setChainCode(String chainCode) { this.chainCode = chainCode; } public List<String> getSubChainCode() { return subChainCode; } public String getDirChainCode() { return dirChainCode; } public String getRelChainCode() { return relChainCode; } public void calcDirChainCode(){ dirChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a == b){ }else if(a < b){ dirChainCode = dirChainCode + "-"; }else{ dirChainCode = dirChainCode + "+"; } } } public void calcRelChainCode(){ relChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a < b){ relChainCode = relChainCode + (a - b + 8); }else{ relChainCode = relChainCode + (a - b); } } } }
Add Directional Chain Code & Relative Chain Code
Add Directional Chain Code & Relative Chain Code
Java
epl-1.0
tmdgitb/pengenalan_pola_sigit_hendy_ilham,tmdgitb/pengenalan_pola_sigit_hendy_ilham,tmdgitb/pengenalan_pola_shih,tmdgitb/pengenalan_pola_shih
java
## Code Before: package id.ac.itb.sigit.pengenalanpola; import java.util.ArrayList; import java.util.List; /** * Created by Sigit on 22/09/2015. */ public class CharDef { private String character; private String chainCode; private List<String> subChainCode=new ArrayList<>(); public String getCharacter() { return character; } public void setCharacter(String character) { this.character = character; } public String getChainCode() { return chainCode; } public void setChainCode(String chainCode) { this.chainCode = chainCode; } public List<String> getSubChainCode() { return subChainCode; } } ## Instruction: Add Directional Chain Code & Relative Chain Code ## Code After: package id.ac.itb.sigit.pengenalanpola; import java.util.ArrayList; import java.util.List; /** * Created by Sigit on 22/09/2015. */ public class CharDef { private String character; private String chainCode; private String dirChainCode; private String relChainCode; private List<String> subChainCode=new ArrayList<>(); public String getCharacter() { return character; } public void setCharacter(String character) { this.character = character; } public String getChainCode() { return chainCode; } public void setChainCode(String chainCode) { this.chainCode = chainCode; } public List<String> getSubChainCode() { return subChainCode; } public String getDirChainCode() { return dirChainCode; } public String getRelChainCode() { return relChainCode; } public void calcDirChainCode(){ dirChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a == b){ }else if(a < b){ dirChainCode = dirChainCode + "-"; }else{ dirChainCode = dirChainCode + "+"; } } } public void calcRelChainCode(){ relChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a < b){ relChainCode = relChainCode + (a - b + 8); }else{ relChainCode = relChainCode + (a - b); } } } }
... public class CharDef { private String character; private String chainCode; private String dirChainCode; private String relChainCode; private List<String> subChainCode=new ArrayList<>(); public String getCharacter() { ... return subChainCode; } public String getDirChainCode() { return dirChainCode; } public String getRelChainCode() { return relChainCode; } public void calcDirChainCode(){ dirChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a == b){ }else if(a < b){ dirChainCode = dirChainCode + "-"; }else{ dirChainCode = dirChainCode + "+"; } } } public void calcRelChainCode(){ relChainCode = ""; for(int i=1; i < chainCode.length(); i++){ int a = Integer.parseInt(chainCode.charAt(i) + ""); int b = Integer.parseInt(chainCode.charAt(i - 1) + ""); if(a < b){ relChainCode = relChainCode + (a - b + 8); }else{ relChainCode = relChainCode + (a - b); } } } } ...
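The commit above adds two derived encodings without saying what they encode. Read from the Java in the record: the directional code emits '+' when the next chain-code digit is larger than the previous one, '-' when it is smaller, and nothing when the direction repeats, while the relative code is the difference of consecutive digits wrapped into the range 0..7. A small Python sketch that mirrors those loops on a made-up chain code, just to make the outputs concrete (the real class stays in Java):

```python
def directional(chain):
    # '+' for an increasing digit, '-' for a decreasing one, nothing when equal.
    out = []
    for prev, cur in zip(chain, chain[1:]):
        if cur > prev:
            out.append('+')
        elif cur < prev:
            out.append('-')
    return ''.join(out)

def relative(chain):
    # Difference of consecutive direction digits, wrapped into 0..7,
    # matching the a - b + 8 branch in the Java version.
    return ''.join(str((int(cur) - int(prev)) % 8)
                   for prev, cur in zip(chain, chain[1:]))

print(directional("00123210"))  # +++---
print(relative("00123210"))     # 0111777
```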
ca7403462588f374cf1af39d537765c02fc7726c
mctrl/rest.py
mctrl/rest.py
from flask import Flask, url_for, Response, json, request class MonitorApp(object): def __init__(self, monitor): self.app = Flask(__name__) self.app.monitor = monitor self.setup() def setup(self): @self.app.route('/anomaly', methods = ['POST']) def api_anomaly(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_anomaly_data(data) return handle_response(success) else: return Response("Unsupported media type\n" + data, status=415) @self.app.route('/monitor', methods = ['POST']) def api_monitor(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_monitor_flows(data) return handle_response(success) else: return Response("Unsupported media type\n" + data, status=415) def handle_response(self, success): if success: return Response("OK\n" + data, status=status) else: return Response("BAD REQUEST\n" + data, status=status)
from flask import Flask, url_for, Response, json, request class MonitorApp(object): def __init__(self, monitor): self.app = Flask(__name__) self.app.monitor = monitor self.setup() def setup(self): @self.app.route('/anomaly', methods = ['POST']) def api_anomaly(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_anomaly_data(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) @self.app.route('/monitor', methods = ['POST']) def api_monitor(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_monitor_flows(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) def handle_response(self, success, data): json_data = json.dumps(data) if success: return Response("OK\n" + json_data, status=200) else: return Response("BAD REQUEST\n" + json_data, status=400)
Fix status codes of handled responses
Fix status codes of handled responses
Python
apache-2.0
h2020-endeavour/endeavour,h2020-endeavour/endeavour
python
## Code Before: from flask import Flask, url_for, Response, json, request class MonitorApp(object): def __init__(self, monitor): self.app = Flask(__name__) self.app.monitor = monitor self.setup() def setup(self): @self.app.route('/anomaly', methods = ['POST']) def api_anomaly(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_anomaly_data(data) return handle_response(success) else: return Response("Unsupported media type\n" + data, status=415) @self.app.route('/monitor', methods = ['POST']) def api_monitor(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_monitor_flows(data) return handle_response(success) else: return Response("Unsupported media type\n" + data, status=415) def handle_response(self, success): if success: return Response("OK\n" + data, status=status) else: return Response("BAD REQUEST\n" + data, status=status) ## Instruction: Fix status codes of handled responses ## Code After: from flask import Flask, url_for, Response, json, request class MonitorApp(object): def __init__(self, monitor): self.app = Flask(__name__) self.app.monitor = monitor self.setup() def setup(self): @self.app.route('/anomaly', methods = ['POST']) def api_anomaly(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_anomaly_data(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) @self.app.route('/monitor', methods = ['POST']) def api_monitor(): data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_monitor_flows(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) def handle_response(self, success, data): json_data = json.dumps(data) if success: return Response("OK\n" + json_data, status=200) else: return Response("BAD REQUEST\n" + json_data, status=400)
# ... existing code ... data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_anomaly_data(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) # ... modified code ... data = request.json if request.headers['Content-Type'] == 'application/json': success = self.app.monitor.process_monitor_flows(data) return self.handle_response(success, data) else: return Response("Unsupported media type\n" + data, status=415) def handle_response(self, success, data): json_data = json.dumps(data) if success: return Response("OK\n" + json_data, status=200) else: return Response("BAD REQUEST\n" + json_data, status=400) # ... rest of the code ...
36a1a9db0de4b21f03478f30e2c2447adb5e77e2
src/main/java/io/github/vcuswimlab/stackintheflow/controller/component/PersistProfileComponent.java
src/main/java/io/github/vcuswimlab/stackintheflow/controller/component/PersistProfileComponent.java
package io.github.vcuswimlab.stackintheflow.controller.component; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.util.xmlb.XmlSerializerUtil; import org.jetbrains.annotations.Nullable; import java.util.HashMap; import java.util.Map; /** * <h1>PersistProfileComponent</h1> * Created on: 8/1/2017 * * @author Tyler John Haden */ @State( name = "ProfileState", storages = { @Storage( id = "stack-overflow", file = "$PROJECT_CONFIG_DIR$/stackoverflow-profile.xml") }) public class PersistProfileComponent implements PersistentStateComponent<PersistProfileComponent> { private Map<String, Integer> userStatMap; public Map<String, Integer> getUserStatMap() { return userStatMap; } @Override public void noStateLoaded() { userStatMap = new HashMap<>(); } @Override public void loadState(PersistProfileComponent state) { XmlSerializerUtil.copyBean(state, this); } @Nullable @Override public PersistProfileComponent getState() { return this; } }
package io.github.vcuswimlab.stackintheflow.controller.component; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.util.xmlb.XmlSerializerUtil; import org.jetbrains.annotations.Nullable; import java.util.HashMap; import java.util.Map; /** * <h1>PersistProfileComponent</h1> * Created on: 8/1/2017 * * @author Tyler John Haden */ @State( name = "ProfileState", storages = { @Storage("stackoverflow-profile.xml") }) public class PersistProfileComponent implements PersistentStateComponent<PersistProfileComponent> { public Map<String, Integer> userStatMap; public Map<String, Integer> getUserStatMap() { return userStatMap; } @Override public void noStateLoaded() { userStatMap = new HashMap<>(); } @Override public void loadState(PersistProfileComponent state) { XmlSerializerUtil.copyBean(state, this); } @Nullable @Override public PersistProfileComponent getState() { return this; } }
Fix user profile not serializing.
Fix user profile not serializing.
Java
mit
vcu-swim-lab/stack-intheflow,vcu-swim-lab/stack-intheflow,vcu-swim-lab/stack-intheflow
java
## Code Before: package io.github.vcuswimlab.stackintheflow.controller.component; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.util.xmlb.XmlSerializerUtil; import org.jetbrains.annotations.Nullable; import java.util.HashMap; import java.util.Map; /** * <h1>PersistProfileComponent</h1> * Created on: 8/1/2017 * * @author Tyler John Haden */ @State( name = "ProfileState", storages = { @Storage( id = "stack-overflow", file = "$PROJECT_CONFIG_DIR$/stackoverflow-profile.xml") }) public class PersistProfileComponent implements PersistentStateComponent<PersistProfileComponent> { private Map<String, Integer> userStatMap; public Map<String, Integer> getUserStatMap() { return userStatMap; } @Override public void noStateLoaded() { userStatMap = new HashMap<>(); } @Override public void loadState(PersistProfileComponent state) { XmlSerializerUtil.copyBean(state, this); } @Nullable @Override public PersistProfileComponent getState() { return this; } } ## Instruction: Fix user profile not serializing. ## Code After: package io.github.vcuswimlab.stackintheflow.controller.component; import com.intellij.openapi.components.PersistentStateComponent; import com.intellij.openapi.components.State; import com.intellij.openapi.components.Storage; import com.intellij.util.xmlb.XmlSerializerUtil; import org.jetbrains.annotations.Nullable; import java.util.HashMap; import java.util.Map; /** * <h1>PersistProfileComponent</h1> * Created on: 8/1/2017 * * @author Tyler John Haden */ @State( name = "ProfileState", storages = { @Storage("stackoverflow-profile.xml") }) public class PersistProfileComponent implements PersistentStateComponent<PersistProfileComponent> { public Map<String, Integer> userStatMap; public Map<String, Integer> getUserStatMap() { return userStatMap; } @Override public void noStateLoaded() { userStatMap = new HashMap<>(); } @Override public void loadState(PersistProfileComponent state) { XmlSerializerUtil.copyBean(state, this); } @Nullable @Override public PersistProfileComponent getState() { return this; } }
... @State( name = "ProfileState", storages = { @Storage("stackoverflow-profile.xml") }) public class PersistProfileComponent implements PersistentStateComponent<PersistProfileComponent> { public Map<String, Integer> userStatMap; public Map<String, Integer> getUserStatMap() { return userStatMap; ...
2f055184a3832d4a44b151f6c3caf4089e80aa6d
devicehive/device_hive_api.py
devicehive/device_hive_api.py
from devicehive.handler import Handler from devicehive.device_hive import DeviceHive class ApiCallHandler(Handler): """Api call handler class.""" def __init__(self, api, call, *call_args, **call_kwargs): super(ApiCallHandler, self).__init__(api) self._call = call self._call_args = call_args self._call_kwargs = call_kwargs self._call_result = None @property def call_result(self): return self._call_result def handle_connect(self): self._call_result = getattr(self.api, self._call)(*self._call_args, **self._call_kwargs) self.api.disconnect() class DeviceHiveApi(object): """Device hive api class.""" def __init__(self, transport_url, **options): self._transport_url = transport_url self._options = options def _call(self, call, *call_args, **call_kwargs): device_hive = DeviceHive(ApiCallHandler, call, *call_args, **call_kwargs) device_hive.connect(self._transport_url, **self._options) return device_hive.transport.handler.handler.call_result def get_info(self): return self._call('get_info') def get_cluster_info(self): return self._call('get_cluster_info')
from devicehive.handler import Handler from devicehive.device_hive import DeviceHive class ApiCallHandler(Handler): """Api call handler class.""" def __init__(self, api, call, *call_args, **call_kwargs): super(ApiCallHandler, self).__init__(api) self._call = call self._call_args = call_args self._call_kwargs = call_kwargs self._call_result = None @property def call_result(self): return self._call_result def handle_connect(self): self._call_result = getattr(self.api, self._call)(*self._call_args, **self._call_kwargs) self.api.disconnect() class DeviceHiveApi(object): """Device hive api class.""" def __init__(self, transport_url, **options): self._transport_url = transport_url self._options = options def _call(self, call, *call_args, **call_kwargs): device_hive = DeviceHive(ApiCallHandler, call, *call_args, **call_kwargs) device_hive.connect(self._transport_url, **self._options) return device_hive.transport.handler.handler.call_result def get_info(self): return self._call('get_info') def get_cluster_info(self): return self._call('get_cluster_info') def get_property(self, name): return self._call('get_property', name) def set_property(self, name, value): return self._call('set_property', name, value) def delete_property(self, name): return self._call('delete_property', name)
Add get_property, set_property and delete_property functions
Add get_property, set_property and delete_property functions
Python
apache-2.0
devicehive/devicehive-python
python
## Code Before: from devicehive.handler import Handler from devicehive.device_hive import DeviceHive class ApiCallHandler(Handler): """Api call handler class.""" def __init__(self, api, call, *call_args, **call_kwargs): super(ApiCallHandler, self).__init__(api) self._call = call self._call_args = call_args self._call_kwargs = call_kwargs self._call_result = None @property def call_result(self): return self._call_result def handle_connect(self): self._call_result = getattr(self.api, self._call)(*self._call_args, **self._call_kwargs) self.api.disconnect() class DeviceHiveApi(object): """Device hive api class.""" def __init__(self, transport_url, **options): self._transport_url = transport_url self._options = options def _call(self, call, *call_args, **call_kwargs): device_hive = DeviceHive(ApiCallHandler, call, *call_args, **call_kwargs) device_hive.connect(self._transport_url, **self._options) return device_hive.transport.handler.handler.call_result def get_info(self): return self._call('get_info') def get_cluster_info(self): return self._call('get_cluster_info') ## Instruction: Add get_property, set_property and delete_property functions ## Code After: from devicehive.handler import Handler from devicehive.device_hive import DeviceHive class ApiCallHandler(Handler): """Api call handler class.""" def __init__(self, api, call, *call_args, **call_kwargs): super(ApiCallHandler, self).__init__(api) self._call = call self._call_args = call_args self._call_kwargs = call_kwargs self._call_result = None @property def call_result(self): return self._call_result def handle_connect(self): self._call_result = getattr(self.api, self._call)(*self._call_args, **self._call_kwargs) self.api.disconnect() class DeviceHiveApi(object): """Device hive api class.""" def __init__(self, transport_url, **options): self._transport_url = transport_url self._options = options def _call(self, call, *call_args, **call_kwargs): device_hive = DeviceHive(ApiCallHandler, call, *call_args, **call_kwargs) device_hive.connect(self._transport_url, **self._options) return device_hive.transport.handler.handler.call_result def get_info(self): return self._call('get_info') def get_cluster_info(self): return self._call('get_cluster_info') def get_property(self, name): return self._call('get_property', name) def set_property(self, name, value): return self._call('set_property', name, value) def delete_property(self, name): return self._call('delete_property', name)
# ... existing code ... def get_cluster_info(self): return self._call('get_cluster_info') def get_property(self, name): return self._call('get_property', name) def set_property(self, name, value): return self._call('set_property', name, value) def delete_property(self, name): return self._call('delete_property', name) # ... rest of the code ...
652bca441489dd49552cbd5945605d51921394f0
snowfloat/settings.py
snowfloat/settings.py
"""Client global settings.""" HOST = 'api.snowfloat.com:443' HTTP_TIMEOUT = 10 HTTP_RETRIES = 3 HTTP_RETRY_INTERVAL = 5 API_KEY = '' API_PRIVATE_KEY = '' try: # pylint: disable=F0401 from settings_prod import * except ImportError: try: # pylint: disable=F0401 from settings_dev import * except ImportError: pass
"""Client global settings.""" import os import ConfigParser HOST = 'api.snowfloat.com:443' HTTP_TIMEOUT = 10 HTTP_RETRIES = 3 HTTP_RETRY_INTERVAL = 5 API_KEY = '' API_PRIVATE_KEY = '' CONFIG = ConfigParser.RawConfigParser() for loc in (os.curdir, os.path.expanduser("~"), "/etc/snowfloat"): try: with open(os.path.join(loc, "snowfloat.conf")) as source: CONFIG.readfp(source) API_KEY = CONFIG.get('snowfloat', 'api_key') API_PRIVATE_KEY = CONFIG.get('snowfloat', 'api_private_key') break except IOError: pass
Read config file in different locations and set global config variables based on that.
Read config file in different locations and set global config variables based on that.
Python
bsd-3-clause
snowfloat/snowfloat-python,snowfloat/snowfloat-python
python
## Code Before: """Client global settings.""" HOST = 'api.snowfloat.com:443' HTTP_TIMEOUT = 10 HTTP_RETRIES = 3 HTTP_RETRY_INTERVAL = 5 API_KEY = '' API_PRIVATE_KEY = '' try: # pylint: disable=F0401 from settings_prod import * except ImportError: try: # pylint: disable=F0401 from settings_dev import * except ImportError: pass ## Instruction: Read config file in different locations and set global config variables based on that. ## Code After: """Client global settings.""" import os import ConfigParser HOST = 'api.snowfloat.com:443' HTTP_TIMEOUT = 10 HTTP_RETRIES = 3 HTTP_RETRY_INTERVAL = 5 API_KEY = '' API_PRIVATE_KEY = '' CONFIG = ConfigParser.RawConfigParser() for loc in (os.curdir, os.path.expanduser("~"), "/etc/snowfloat"): try: with open(os.path.join(loc, "snowfloat.conf")) as source: CONFIG.readfp(source) API_KEY = CONFIG.get('snowfloat', 'api_key') API_PRIVATE_KEY = CONFIG.get('snowfloat', 'api_private_key') break except IOError: pass
// ... existing code ... """Client global settings.""" import os import ConfigParser HOST = 'api.snowfloat.com:443' HTTP_TIMEOUT = 10 // ... modified code ... API_KEY = '' API_PRIVATE_KEY = '' CONFIG = ConfigParser.RawConfigParser() for loc in (os.curdir, os.path.expanduser("~"), "/etc/snowfloat"): try: with open(os.path.join(loc, "snowfloat.conf")) as source: CONFIG.readfp(source) API_KEY = CONFIG.get('snowfloat', 'api_key') API_PRIVATE_KEY = CONFIG.get('snowfloat', 'api_private_key') break except IOError: pass // ... rest of the code ...
9489e8512df9e073ac019c75f827c03fe64242dd
sorts/bubble_sort.py
sorts/bubble_sort.py
from __future__ import print_function def bubble_sort(collection): """Pure implementation of bubble sort algorithm in Python :param collection: some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending Examples: >>> bubble_sort([0, 5, 3, 2, 2]) [0, 2, 2, 3, 5] >>> bubble_sort([]) [] >>> bubble_sort([-2, -5, -45]) [-45, -5, -2] """ length = len(collection) for i in range(length): for j in range(length-1): if collection[j] > collection[j+1]: collection[j], collection[j+1] = collection[j+1], collection[j] return collection if __name__ == '__main__': try: raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] print(bubble_sort(unsorted))
from __future__ import print_function def bubble_sort(collection): """Pure implementation of bubble sort algorithm in Python :param collection: some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending Examples: >>> bubble_sort([0, 5, 3, 2, 2]) [0, 2, 2, 3, 5] >>> bubble_sort([]) [] >>> bubble_sort([-2, -5, -45]) [-45, -5, -2] """ length = len(collection) for i in range(length): swapped = False for j in range(length-1): if collection[j] > collection[j+1]: swapped = True collection[j], collection[j+1] = collection[j+1], collection[j] if not swapped: break # Stop iteration if the collection is sorted. return collection if __name__ == '__main__': try: raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] print(bubble_sort(unsorted))
Break if the collection is sorted
Break if the collection is sorted
Python
mit
TheAlgorithms/Python
python
## Code Before: from __future__ import print_function def bubble_sort(collection): """Pure implementation of bubble sort algorithm in Python :param collection: some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending Examples: >>> bubble_sort([0, 5, 3, 2, 2]) [0, 2, 2, 3, 5] >>> bubble_sort([]) [] >>> bubble_sort([-2, -5, -45]) [-45, -5, -2] """ length = len(collection) for i in range(length): for j in range(length-1): if collection[j] > collection[j+1]: collection[j], collection[j+1] = collection[j+1], collection[j] return collection if __name__ == '__main__': try: raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] print(bubble_sort(unsorted)) ## Instruction: Break if the collection is sorted ## Code After: from __future__ import print_function def bubble_sort(collection): """Pure implementation of bubble sort algorithm in Python :param collection: some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending Examples: >>> bubble_sort([0, 5, 3, 2, 2]) [0, 2, 2, 3, 5] >>> bubble_sort([]) [] >>> bubble_sort([-2, -5, -45]) [-45, -5, -2] """ length = len(collection) for i in range(length): swapped = False for j in range(length-1): if collection[j] > collection[j+1]: swapped = True collection[j], collection[j+1] = collection[j+1], collection[j] if not swapped: break # Stop iteration if the collection is sorted. return collection if __name__ == '__main__': try: raw_input # Python 2 except NameError: raw_input = input # Python 3 user_input = raw_input('Enter numbers separated by a comma:\n').strip() unsorted = [int(item) for item in user_input.split(',')] print(bubble_sort(unsorted))
... """ length = len(collection) for i in range(length): swapped = False for j in range(length-1): if collection[j] > collection[j+1]: swapped = True collection[j], collection[j+1] = collection[j+1], collection[j] if not swapped: break # Stop iteration if the collection is sorted. return collection ...
3fa49eda98233f4cd76cf4f3b9b1fc02006fb2de
website/search/mutation_result.py
website/search/mutation_result.py
from models import Protein, Mutation class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'] = Mutation.query.filter_by( protein=state['protein'], **state['mutation_kwargs'] ).one() del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state)
from models import Protein, Mutation from database import get_or_create class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'], created = get_or_create( Mutation, protein=state['protein'], **state['mutation_kwargs'] ) del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state)
Fix result loading for novel mutations
Fix result loading for novel mutations
Python
lgpl-2.1
reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations
python
## Code Before: from models import Protein, Mutation class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'] = Mutation.query.filter_by( protein=state['protein'], **state['mutation_kwargs'] ).one() del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state) ## Instruction: Fix result loading for novel mutations ## Code After: from models import Protein, Mutation from database import get_or_create class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'], created = get_or_create( Mutation, protein=state['protein'], **state['mutation_kwargs'] ) del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state)
... from models import Protein, Mutation from database import get_or_create class SearchResult: ... ).one() del state['protein_refseq'] state['mutation'], created = get_or_create( Mutation, protein=state['protein'], **state['mutation_kwargs'] ) del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] ...
889a2349efa1b76fd92981210798dc3e2d38d711
setup.py
setup.py
import os import subprocess from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ # This will fail if pandoc is not in system path. subprocess.call( [ "pandoc", "readme.md", "--from", "markdown", "--to", "rst", "-s", "-o", "readme.rst", ] ) with open("readme.rst") as f: readme = f.read() os.remove("readme.rst") return readme setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="[email protected]", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], )
from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ with open("readme.md") as readme_file: descr = readme_file.read() return descr setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), long_description_content_type="text/markdown", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="[email protected]", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], )
Use unaltered markdown readme for long_description
Use unaltered markdown readme for long_description
Python
isc
kbevers/kvadratnet,kbevers/kvadratnet
python
## Code Before: import os import subprocess from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ # This will fail if pandoc is not in system path. subprocess.call( [ "pandoc", "readme.md", "--from", "markdown", "--to", "rst", "-s", "-o", "readme.rst", ] ) with open("readme.rst") as f: readme = f.read() os.remove("readme.rst") return readme setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="[email protected]", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], ) ## Instruction: Use unaltered markdown readme for long_description ## Code After: from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ with open("readme.md") as readme_file: descr = readme_file.read() return descr setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), long_description_content_type="text/markdown", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="[email protected]", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], )
// ... existing code ... from setuptools import setup import kvadratnet // ... modified code ... Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ with open("readme.md") as readme_file: descr = readme_file.read() return descr setup( ... version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), long_description_content_type="text/markdown", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", // ... rest of the code ...
a179b2afff8af8dce5ae816d6f97a002a9151cf4
booger_test.py
booger_test.py
from unittest import TestCase ################################################################################ # Nosetest parser from booger import NOSE_DIV_WIDTH, NosetestsParser class NosetestsParserTest(TestCase): def setUp(self): self.parser = NosetestsParser() def short_output_test(self): inp = '=' * 70 out, end = self.parser.parse_short_output(inp) assert end == True
from unittest import TestCase ################################################################################ # Nosetest parser from booger import NOSE_DIV_WIDTH, NosetestsParser class NosetestsParserTest(TestCase): def setUp(self): self.parser = NosetestsParser() def short_output_end_test(self): ''' Make sure we recognise the end of the short output ''' inp = '=' * 70 out, end = self.parser.parse_short_output(inp) assert end == False def short_output_ok_test(self): ''' Recognize `msg ... ok` messages ''' msg = 'msg ... ok' out, end = self.parser.parse_short_output(msg) assert out == 'ok' def short_output_fail_test(self): ''' Recognize `msg ... FAIL` messages ''' msg = 'msg ... FAIL' out, end = self.parser.parse_short_output(msg) assert out == 'fail' def short_output_error_test(self): ''' Recognize `msg ... ERROR` messages ''' msg = 'msg ... ERROR' out, end = self.parser.parse_short_output(msg) assert out == 'error'
Write some more tests for the short nosetests parser
Write some more tests for the short nosetests parser
Python
mit
thenoviceoof/booger,thenoviceoof/booger
python
## Code Before: from unittest import TestCase ################################################################################ # Nosetest parser from booger import NOSE_DIV_WIDTH, NosetestsParser class NosetestsParserTest(TestCase): def setUp(self): self.parser = NosetestsParser() def short_output_test(self): inp = '=' * 70 out, end = self.parser.parse_short_output(inp) assert end == True ## Instruction: Write some more tests for the short nosetests parser ## Code After: from unittest import TestCase ################################################################################ # Nosetest parser from booger import NOSE_DIV_WIDTH, NosetestsParser class NosetestsParserTest(TestCase): def setUp(self): self.parser = NosetestsParser() def short_output_end_test(self): ''' Make sure we recognise the end of the short output ''' inp = '=' * 70 out, end = self.parser.parse_short_output(inp) assert end == False def short_output_ok_test(self): ''' Recognize `msg ... ok` messages ''' msg = 'msg ... ok' out, end = self.parser.parse_short_output(msg) assert out == 'ok' def short_output_fail_test(self): ''' Recognize `msg ... FAIL` messages ''' msg = 'msg ... FAIL' out, end = self.parser.parse_short_output(msg) assert out == 'fail' def short_output_error_test(self): ''' Recognize `msg ... ERROR` messages ''' msg = 'msg ... ERROR' out, end = self.parser.parse_short_output(msg) assert out == 'error'
# ... existing code ... class NosetestsParserTest(TestCase): def setUp(self): self.parser = NosetestsParser() def short_output_end_test(self): ''' Make sure we recognise the end of the short output ''' inp = '=' * 70 out, end = self.parser.parse_short_output(inp) assert end == False def short_output_ok_test(self): ''' Recognize `msg ... ok` messages ''' msg = 'msg ... ok' out, end = self.parser.parse_short_output(msg) assert out == 'ok' def short_output_fail_test(self): ''' Recognize `msg ... FAIL` messages ''' msg = 'msg ... FAIL' out, end = self.parser.parse_short_output(msg) assert out == 'fail' def short_output_error_test(self): ''' Recognize `msg ... ERROR` messages ''' msg = 'msg ... ERROR' out, end = self.parser.parse_short_output(msg) assert out == 'error' # ... rest of the code ...
6d7910deebeb68e12c7d7f721c54ada031560024
src/WhiteLibrary/keywords/items/textbox.py
src/WhiteLibrary/keywords/items/textbox.py
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
Change to better argument name
Change to better argument name
Python
apache-2.0
Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary
python
## Code Before: from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text ## Instruction: Change to better argument name ## Code After: from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
... class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword def verify_text_in_textbox(self, locator, expected): ...
271df6a2cfb7319be7d0bf70906982d8c800c032
rxandroid/src/test/java/rx/android/plugins/RxAndroidPluginsTest.java
rxandroid/src/test/java/rx/android/plugins/RxAndroidPluginsTest.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rx.android.plugins; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class RxAndroidPluginsTest { @Before @After public void setUpAndTearDown() { RxAndroidPlugins.getInstance().reset(); } @Test public void registeredSchedulersHookIsUsed() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); assertSame(hook, plugins.getSchedulersHook()); } @Test public void registerSchedulersHookTwiceFails() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); try { plugins.registerSchedulersHook(hook); fail(); } catch (IllegalStateException e) { assertTrue(e.getMessage().startsWith("Another strategy was already registered:")); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rx.android.plugins; import org.junit.Test; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class RxAndroidPluginsTest { @Test public void registeredSchedulersHookIsUsed() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); assertSame(hook, plugins.getSchedulersHook()); } @Test public void registerSchedulersHookTwiceFails() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); try { plugins.registerSchedulersHook(hook); fail(); } catch (IllegalStateException e) { assertTrue(e.getMessage().startsWith("Another strategy was already registered:")); } } }
Remove unnecessary reset. These tests don't use the global instance.
Remove unnecessary reset. These tests don't use the global instance.
Java
apache-2.0
MaTriXy/RxAndroid,PibeDx/RxAndroid,ReactiveX/RxAndroid,EddieRingle/RxAndroid,PibeDx/RxAndroid,MaTriXy/RxAndroid
java
## Code Before: /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rx.android.plugins; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class RxAndroidPluginsTest { @Before @After public void setUpAndTearDown() { RxAndroidPlugins.getInstance().reset(); } @Test public void registeredSchedulersHookIsUsed() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); assertSame(hook, plugins.getSchedulersHook()); } @Test public void registerSchedulersHookTwiceFails() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); try { plugins.registerSchedulersHook(hook); fail(); } catch (IllegalStateException e) { assertTrue(e.getMessage().startsWith("Another strategy was already registered:")); } } } ## Instruction: Remove unnecessary reset. These tests don't use the global instance. ## Code After: /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rx.android.plugins; import org.junit.Test; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class RxAndroidPluginsTest { @Test public void registeredSchedulersHookIsUsed() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); assertSame(hook, plugins.getSchedulersHook()); } @Test public void registerSchedulersHookTwiceFails() { RxAndroidPlugins plugins = new RxAndroidPlugins(); RxAndroidSchedulersHook hook = new RxAndroidSchedulersHook(); plugins.registerSchedulersHook(hook); try { plugins.registerSchedulersHook(hook); fail(); } catch (IllegalStateException e) { assertTrue(e.getMessage().startsWith("Another strategy was already registered:")); } } }
... */ package rx.android.plugins; import org.junit.Test; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class RxAndroidPluginsTest { @Test public void registeredSchedulersHookIsUsed() { RxAndroidPlugins plugins = new RxAndroidPlugins(); ...
529ab85ac8a25b05690f507ed67ba767d4fb53db
pyEchosign/utils/handle_response.py
pyEchosign/utils/handle_response.py
from requests import Response def check_error(response: Response): """ Takes a requests package response object and checks the error code and raises the proper exception """ response_json = response.json() code = response_json.get('code', None) if response.status_code == 401: raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your ' 'account should have access to perform this action.') def response_success(response: Response): return 199 < response.status_code < 300
from requests import Response from exceptions.internal_exceptions import ApiError def check_error(response: Response): """ Takes a requests package response object and checks the error code and raises the proper exception """ if response.status_code == 401: raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your ' 'account should have access to perform this action.') elif not response_success(response): try: json_response = response.json() except ValueError: json_response = '' raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following ' f'JSON: "{json_response}" and content: "{response.content}""') def response_success(response: Response): return 199 < response.status_code < 300
Check for json() ValueError with requests when raising an ApiError in check_error()
Check for json() ValueError with requests when raising an ApiError in check_error()
Python
mit
JensAstrup/pyEchosign
python
## Code Before: from requests import Response def check_error(response: Response): """ Takes a requests package response object and checks the error code and raises the proper exception """ response_json = response.json() code = response_json.get('code', None) if response.status_code == 401: raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your ' 'account should have access to perform this action.') def response_success(response: Response): return 199 < response.status_code < 300 ## Instruction: Check for json() ValueError with requests when raising an ApiError in check_error() ## Code After: from requests import Response from exceptions.internal_exceptions import ApiError def check_error(response: Response): """ Takes a requests package response object and checks the error code and raises the proper exception """ if response.status_code == 401: raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your ' 'account should have access to perform this action.') elif not response_success(response): try: json_response = response.json() except ValueError: json_response = '' raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following ' f'JSON: "{json_response}" and content: "{response.content}""') def response_success(response: Response): return 199 < response.status_code < 300
... from requests import Response from exceptions.internal_exceptions import ApiError def check_error(response: Response): """ Takes a requests package response object and checks the error code and raises the proper exception """ if response.status_code == 401: raise PermissionError('Echosign API returned a 401, your access token may be invalid if you believe your ' 'account should have access to perform this action.') elif not response_success(response): try: json_response = response.json() except ValueError: json_response = '' raise ApiError(f'Received status code {response.status_code} from the Echosign API with the following ' f'JSON: "{json_response}" and content: "{response.content}""') def response_success(response: Response): return 199 < response.status_code < 300 ...
a816d0655504051ea12718a0e34bc9645fc92730
personal-site/projects/views.py
personal-site/projects/views.py
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
Remove unnecessary projects added to context
Remove unnecessary projects added to context
Python
bsd-3-clause
brandonw/personal-site,brandonw/personal-site,brandonw/personal-site
python
## Code Before: from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context ## Instruction: Remove unnecessary projects added to context ## Code After: from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
... def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) ...
167b17feeaedb76752efff56afbd47b7d2fcb84d
src/main/java/org/breizhcamp/video/uploader/dto/Event.kt
src/main/java/org/breizhcamp/video/uploader/dto/Event.kt
package org.breizhcamp.video.uploader.dto import com.fasterxml.jackson.annotation.JsonProperty import java.time.LocalDateTime /** * JSON deserialization of an Event */ class Event { var id: Int? = null var name: String? = null var description: String? = null var speakers: String? = null var language: String? = null @JsonProperty("event_start") var eventStart: LocalDateTime? = null @JsonProperty("event_end") var eventEnd: LocalDateTime? = null @JsonProperty("event_type") var eventType: String? = null var format: String? = null var venue: String? = null @JsonProperty("venue_id") var venueId: String? = null @JsonProperty("video_url") var videoUrl:String? = null @JsonProperty("files_url") var filesUrl:String? = null @JsonProperty("slides_url") var slidesUrl: String? = null }
package org.breizhcamp.video.uploader.dto import com.fasterxml.jackson.annotation.JsonProperty import java.time.ZonedDateTime /** * JSON deserialization of an Event */ class Event { var id: Int? = null var name: String? = null var description: String? = null var speakers: String? = null var language: String? = null @JsonProperty("event_start") var eventStart: ZonedDateTime? = null @JsonProperty("event_end") var eventEnd: ZonedDateTime? = null @JsonProperty("event_type") var eventType: String? = null var format: String? = null var venue: String? = null @JsonProperty("venue_id") var venueId: String? = null @JsonProperty("video_url") var videoUrl:String? = null @JsonProperty("files_url") var filesUrl:String? = null @JsonProperty("slides_url") var slidesUrl: String? = null }
Use zoned dates like CFP does
Use zoned dates like CFP does
Kotlin
mit
breizhcamp/tools,breizhcamp/tools,breizhcamp/tools,breizhcamp/tools,breizhcamp/tools
kotlin
## Code Before: package org.breizhcamp.video.uploader.dto import com.fasterxml.jackson.annotation.JsonProperty import java.time.LocalDateTime /** * JSON deserialization of an Event */ class Event { var id: Int? = null var name: String? = null var description: String? = null var speakers: String? = null var language: String? = null @JsonProperty("event_start") var eventStart: LocalDateTime? = null @JsonProperty("event_end") var eventEnd: LocalDateTime? = null @JsonProperty("event_type") var eventType: String? = null var format: String? = null var venue: String? = null @JsonProperty("venue_id") var venueId: String? = null @JsonProperty("video_url") var videoUrl:String? = null @JsonProperty("files_url") var filesUrl:String? = null @JsonProperty("slides_url") var slidesUrl: String? = null } ## Instruction: Use zoned dates like CFP does ## Code After: package org.breizhcamp.video.uploader.dto import com.fasterxml.jackson.annotation.JsonProperty import java.time.ZonedDateTime /** * JSON deserialization of an Event */ class Event { var id: Int? = null var name: String? = null var description: String? = null var speakers: String? = null var language: String? = null @JsonProperty("event_start") var eventStart: ZonedDateTime? = null @JsonProperty("event_end") var eventEnd: ZonedDateTime? = null @JsonProperty("event_type") var eventType: String? = null var format: String? = null var venue: String? = null @JsonProperty("venue_id") var venueId: String? = null @JsonProperty("video_url") var videoUrl:String? = null @JsonProperty("files_url") var filesUrl:String? = null @JsonProperty("slides_url") var slidesUrl: String? = null }
// ... existing code ... import com.fasterxml.jackson.annotation.JsonProperty import java.time.ZonedDateTime /** * JSON deserialization of an Event // ... modified code ... var speakers: String? = null var language: String? = null @JsonProperty("event_start") var eventStart: ZonedDateTime? = null @JsonProperty("event_end") var eventEnd: ZonedDateTime? = null @JsonProperty("event_type") var eventType: String? = null var format: String? = null // ... rest of the code ...
2dbd2d385e821cee9a8bc8414bfba71c8b4dbc06
tests/test_ehrcorral.py
tests/test_ehrcorral.py
import unittest from ehrcorral import ehrcorral class TestEhrcorral(unittest.TestCase): def setUp(self): pass def test_something(self): pass def tearDown(self): pass
from __future__ import print_function from __future__ import division from __future__ import absolute_import from __future__ import unicode_literals import unittest from ehrcorral import ehrcorral from faker import Faker fake = Faker() fake.seed(8548) class TestEHRcorral(unittest.TestCase): def setUp(self): profile_fields = ['name', 'birthdate', 'ssn', 'address'] self.herd = [fake.profile(fields=profile_fields) for n in xrange(100)] def test_something(self): pass def tearDown(self): pass
Add test case setUp to generate fake patient info
Add test case setUp to generate fake patient info
Python
isc
nsh87/ehrcorral
python
## Code Before: import unittest from ehrcorral import ehrcorral class TestEhrcorral(unittest.TestCase): def setUp(self): pass def test_something(self): pass def tearDown(self): pass ## Instruction: Add test case setUp to generate fake patient info ## Code After: from __future__ import print_function from __future__ import division from __future__ import absolute_import from __future__ import unicode_literals import unittest from ehrcorral import ehrcorral from faker import Faker fake = Faker() fake.seed(8548) class TestEHRcorral(unittest.TestCase): def setUp(self): profile_fields = ['name', 'birthdate', 'ssn', 'address'] self.herd = [fake.profile(fields=profile_fields) for n in xrange(100)] def test_something(self): pass def tearDown(self): pass
// ... existing code ... from __future__ import print_function from __future__ import division from __future__ import absolute_import from __future__ import unicode_literals import unittest from ehrcorral import ehrcorral from faker import Faker fake = Faker() fake.seed(8548) class TestEHRcorral(unittest.TestCase): def setUp(self): profile_fields = ['name', 'birthdate', 'ssn', 'address'] self.herd = [fake.profile(fields=profile_fields) for n in xrange(100)] def test_something(self): pass // ... rest of the code ...
3bf41213abc7ddd8421e11c2149b536c255c13eb
pixpack/utils.py
pixpack/utils.py
import locale import os def sys_trans_var(): # check system language sys_loc = locale.getlocale() sys_lang = sys_loc[0] # system default language if sys_lang == 'en_EN' or sys_lang == 'en_GB': return 0 elif sys_lang == 'tr_TR': return 1 else: return 0 def name_existing_photos(dest_directory, dest_file, copy_suffix): # rename if the file is existed already, for instance: photo_1 dest_file_path = os.path.join(dest_directory, dest_file) i=1 while os.path.exists(dest_file_path): dest_file_path = os.path.join(dest_directory, dest_file) file_name = os.path.basename(dest_file_path) name, ext = os.path.splitext(file_name) name = name + "_" + str(copy_suffix) + str(i) new_name = name + ext dest_file_path = os.path.join(dest_directory, new_name) i+=1 return dest_file_path
import locale import os def sys_trans_var(): # check system language sys_loc = locale.getlocale() sys_lang = sys_loc[0] # system default language if sys_lang == 'en_EN' or sys_lang == 'en_GB': return 0 elif sys_lang == 'tr_TR': return 1 else: return 0 def name_existing_photos(dest_directory, dest_file, copy_suffix): # rename if the file is existed already, for instance: photo_1 dest_file_path = os.path.join(dest_directory, dest_file) i=1 if os.path.exists(dest_file_path): dest_directory = os.path.join(dest_directory, "copies") if not os.path.exists(dest_directory): os.makedirs(dest_directory) while os.path.exists(dest_file_path): dest_file_path = os.path.join(dest_directory, dest_file) file_name = os.path.basename(dest_file_path) name, ext = os.path.splitext(file_name) name = name + "_" + str(copy_suffix) + str(i) new_name = name + ext dest_file_path = os.path.join(dest_directory, new_name) i+=1 return dest_file_path
Store the duplicated items separately in related folder
Store the duplicated items separately in related folder
Python
mit
OrhanOdabasi/PixPack,OrhanOdabasi/PixPack
python
## Code Before: import locale import os def sys_trans_var(): # check system language sys_loc = locale.getlocale() sys_lang = sys_loc[0] # system default language if sys_lang == 'en_EN' or sys_lang == 'en_GB': return 0 elif sys_lang == 'tr_TR': return 1 else: return 0 def name_existing_photos(dest_directory, dest_file, copy_suffix): # rename if the file is existed already, for instance: photo_1 dest_file_path = os.path.join(dest_directory, dest_file) i=1 while os.path.exists(dest_file_path): dest_file_path = os.path.join(dest_directory, dest_file) file_name = os.path.basename(dest_file_path) name, ext = os.path.splitext(file_name) name = name + "_" + str(copy_suffix) + str(i) new_name = name + ext dest_file_path = os.path.join(dest_directory, new_name) i+=1 return dest_file_path ## Instruction: Store the duplicated items separately in related folder ## Code After: import locale import os def sys_trans_var(): # check system language sys_loc = locale.getlocale() sys_lang = sys_loc[0] # system default language if sys_lang == 'en_EN' or sys_lang == 'en_GB': return 0 elif sys_lang == 'tr_TR': return 1 else: return 0 def name_existing_photos(dest_directory, dest_file, copy_suffix): # rename if the file is existed already, for instance: photo_1 dest_file_path = os.path.join(dest_directory, dest_file) i=1 if os.path.exists(dest_file_path): dest_directory = os.path.join(dest_directory, "copies") if not os.path.exists(dest_directory): os.makedirs(dest_directory) while os.path.exists(dest_file_path): dest_file_path = os.path.join(dest_directory, dest_file) file_name = os.path.basename(dest_file_path) name, ext = os.path.splitext(file_name) name = name + "_" + str(copy_suffix) + str(i) new_name = name + ext dest_file_path = os.path.join(dest_directory, new_name) i+=1 return dest_file_path
// ... existing code ... # rename if the file is existed already, for instance: photo_1 dest_file_path = os.path.join(dest_directory, dest_file) i=1 if os.path.exists(dest_file_path): dest_directory = os.path.join(dest_directory, "copies") if not os.path.exists(dest_directory): os.makedirs(dest_directory) while os.path.exists(dest_file_path): dest_file_path = os.path.join(dest_directory, dest_file) file_name = os.path.basename(dest_file_path) // ... rest of the code ...
334c23e313cef141b6a18e8bf34be0fea662043f
cla_public/libs/call_centre_availability.py
cla_public/libs/call_centre_availability.py
import datetime def time_choice(time): display_format = "%I:%M %p" end = time + datetime.timedelta(minutes=30) display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0") return time.strftime("%H%M"), display_string def suffix(d): if 11 <= d <= 13: return "th" return {1: "st", 2: "nd", 3: "rd"}.get(d % 10, "th") def day_choice(day): return day.strftime("%Y%m%d"), "%s %s%s" % (day.strftime("%A"), day.strftime("%d").lstrip("0"), suffix(day.day))
import datetime from flask.ext.babel import lazy_gettext as _ def time_choice(time): display_format = "%I:%M %p" end = time + datetime.timedelta(minutes=30) display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0") return time.strftime("%H%M"), display_string def suffix(d): if 11 <= d <= 13: return _("th") return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th")) def day_choice(day): return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
Allow for Day name and ordinal suffix to be translated
Allow for Day name and ordinal suffix to be translated
Python
mit
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
python
## Code Before: import datetime def time_choice(time): display_format = "%I:%M %p" end = time + datetime.timedelta(minutes=30) display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0") return time.strftime("%H%M"), display_string def suffix(d): if 11 <= d <= 13: return "th" return {1: "st", 2: "nd", 3: "rd"}.get(d % 10, "th") def day_choice(day): return day.strftime("%Y%m%d"), "%s %s%s" % (day.strftime("%A"), day.strftime("%d").lstrip("0"), suffix(day.day)) ## Instruction: Allow for Day name and ordinal suffix to be translated ## Code After: import datetime from flask.ext.babel import lazy_gettext as _ def time_choice(time): display_format = "%I:%M %p" end = time + datetime.timedelta(minutes=30) display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0") return time.strftime("%H%M"), display_string def suffix(d): if 11 <= d <= 13: return _("th") return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th")) def day_choice(day): return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
# ... existing code ... import datetime from flask.ext.babel import lazy_gettext as _ def time_choice(time): # ... modified code ... def suffix(d): if 11 <= d <= 13: return _("th") return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th")) def day_choice(day): return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day)) # ... rest of the code ...
084eac5735404edeed62cee4e2b429c8f4f2a7a5
app/dao/inbound_numbers_dao.py
app/dao/inbound_numbers_dao.py
from app import db from app.dao.dao_utils import transactional from app.models import InboundNumber def dao_get_inbound_numbers(): return InboundNumber.query.all() def dao_get_available_inbound_numbers(): return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all() def dao_get_inbound_number_for_service(service_id): return InboundNumber.query.filter(InboundNumber.service_id == service_id).first() def dao_get_inbound_number(inbound_number_id): return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first() @transactional def dao_set_inbound_number_to_service(service_id, inbound_number): inbound_number.service_id = service_id db.session.add(inbound_number) @transactional def dao_set_inbound_number_active_flag(service_id, active): inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first() inbound_number.active = active db.session.add(inbound_number)
from app import db from app.dao.dao_utils import transactional from app.models import InboundNumber def dao_get_inbound_numbers(): return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all() def dao_get_available_inbound_numbers(): return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all() def dao_get_inbound_number_for_service(service_id): return InboundNumber.query.filter(InboundNumber.service_id == service_id).first() def dao_get_inbound_number(inbound_number_id): return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first() @transactional def dao_set_inbound_number_to_service(service_id, inbound_number): inbound_number.service_id = service_id db.session.add(inbound_number) @transactional def dao_set_inbound_number_active_flag(service_id, active): inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first() inbound_number.active = active db.session.add(inbound_number)
Update dao to order by updated_at, number
Update dao to order by updated_at, number
Python
mit
alphagov/notifications-api,alphagov/notifications-api
python
## Code Before: from app import db from app.dao.dao_utils import transactional from app.models import InboundNumber def dao_get_inbound_numbers(): return InboundNumber.query.all() def dao_get_available_inbound_numbers(): return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all() def dao_get_inbound_number_for_service(service_id): return InboundNumber.query.filter(InboundNumber.service_id == service_id).first() def dao_get_inbound_number(inbound_number_id): return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first() @transactional def dao_set_inbound_number_to_service(service_id, inbound_number): inbound_number.service_id = service_id db.session.add(inbound_number) @transactional def dao_set_inbound_number_active_flag(service_id, active): inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first() inbound_number.active = active db.session.add(inbound_number) ## Instruction: Update dao to order by updated_at, number ## Code After: from app import db from app.dao.dao_utils import transactional from app.models import InboundNumber def dao_get_inbound_numbers(): return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all() def dao_get_available_inbound_numbers(): return InboundNumber.query.filter(InboundNumber.active, InboundNumber.service_id.is_(None)).all() def dao_get_inbound_number_for_service(service_id): return InboundNumber.query.filter(InboundNumber.service_id == service_id).first() def dao_get_inbound_number(inbound_number_id): return InboundNumber.query.filter(InboundNumber.id == inbound_number_id).first() @transactional def dao_set_inbound_number_to_service(service_id, inbound_number): inbound_number.service_id = service_id db.session.add(inbound_number) @transactional def dao_set_inbound_number_active_flag(service_id, active): inbound_number = InboundNumber.query.filter(InboundNumber.service_id == service_id).first() inbound_number.active = active db.session.add(inbound_number)
... def dao_get_inbound_numbers(): return InboundNumber.query.order_by(InboundNumber.updated_at, InboundNumber.number).all() def dao_get_available_inbound_numbers(): ...
9173a91ed6fc234c4a7b9dbf1d2e8f853d977a86
mail_restrict_follower_selection/__manifest__.py
mail_restrict_follower_selection/__manifest__.py
{ "name": "Restrict follower selection", "version": "13.0.1.0.2", "author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)", "license": "AGPL-3", "category": "Social Network", "summary": "Define a domain from which followers can be selected", "depends": ["mail"], "data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"], "installable": True, }
{ "name": "Restrict follower selection", "version": "13.0.1.0.2", "author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)", "license": "AGPL-3", "category": "Social Network", "website": "https://github.com/OCA/social", "summary": "Define a domain from which followers can be selected", "depends": ["mail"], "data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"], "installable": True, }
Apply pre-commit changes: Resolve conflicts
[IMP] Apply pre-commit changes: Resolve conflicts
Python
agpl-3.0
OCA/social,OCA/social,OCA/social
python
## Code Before: { "name": "Restrict follower selection", "version": "13.0.1.0.2", "author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)", "license": "AGPL-3", "category": "Social Network", "summary": "Define a domain from which followers can be selected", "depends": ["mail"], "data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"], "installable": True, } ## Instruction: [IMP] Apply pre-commit changes: Resolve conflicts ## Code After: { "name": "Restrict follower selection", "version": "13.0.1.0.2", "author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)", "license": "AGPL-3", "category": "Social Network", "website": "https://github.com/OCA/social", "summary": "Define a domain from which followers can be selected", "depends": ["mail"], "data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"], "installable": True, }
# ... existing code ... "author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)", "license": "AGPL-3", "category": "Social Network", "website": "https://github.com/OCA/social", "summary": "Define a domain from which followers can be selected", "depends": ["mail"], "data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"], # ... rest of the code ...
c5bc66351870ce369b0d06161f07a1943dfeed93
plugin_handler.py
plugin_handler.py
import os import pkgutil import sys def load_venue_plugins(): """ Read plugin directory and load found plugins. Variable "blacklisted" can be used to exclude loading certain plugins. """ blacklisted = ["plugin_tiketti", "plugin_ontherocks"] foundblacklisted = list() loadedplugins = list() pluginspathabs = os.path.join(os.path.dirname(__file__), "venues") for loader, plugname, ispkg in \ pkgutil.iter_modules(path = [pluginspathabs]): if plugname in sys.modules: continue if plugname in blacklisted: foundblacklisted.append(plugname.lstrip("plugin_")) continue plugpath = "venues.%s" % (plugname) loadplug = __import__(plugpath, fromlist = [plugname]) classname = plugname.split("_")[1].title() loadedclass = getattr(loadplug, classname) instance = loadedclass() loadedplugins.append(instance) print(f"Loaded plugin: {instance.getVenueName()}") print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:]))) return loadedplugins if __name__ == '__main__': load_venue_plugins()
import os import pkgutil import sys def load_venue_plugins(): """ Read plugin directory and load found plugins. Variable "blacklisted" can be used to exclude loading certain plugins. """ blacklisted = ["plugin_tiketti"] foundblacklisted = list() loadedplugins = list() pluginspathabs = os.path.join(os.path.dirname(__file__), "venues") for loader, plugname, ispkg in \ pkgutil.iter_modules(path = [pluginspathabs]): if plugname in sys.modules: continue if plugname in blacklisted: foundblacklisted.append(plugname.lstrip("plugin_")) continue plugpath = "venues.%s" % (plugname) loadplug = __import__(plugpath, fromlist = [plugname]) classname = plugname.split("_")[1].title() loadedclass = getattr(loadplug, classname) instance = loadedclass() loadedplugins.append(instance) print(f"Loaded plugin: {instance.getVenueName()}") print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:]))) return loadedplugins if __name__ == '__main__': load_venue_plugins()
Enable On the rocks plugin
Enable On the rocks plugin
Python
isc
weezel/BandEventNotifier
python
## Code Before: import os import pkgutil import sys def load_venue_plugins(): """ Read plugin directory and load found plugins. Variable "blacklisted" can be used to exclude loading certain plugins. """ blacklisted = ["plugin_tiketti", "plugin_ontherocks"] foundblacklisted = list() loadedplugins = list() pluginspathabs = os.path.join(os.path.dirname(__file__), "venues") for loader, plugname, ispkg in \ pkgutil.iter_modules(path = [pluginspathabs]): if plugname in sys.modules: continue if plugname in blacklisted: foundblacklisted.append(plugname.lstrip("plugin_")) continue plugpath = "venues.%s" % (plugname) loadplug = __import__(plugpath, fromlist = [plugname]) classname = plugname.split("_")[1].title() loadedclass = getattr(loadplug, classname) instance = loadedclass() loadedplugins.append(instance) print(f"Loaded plugin: {instance.getVenueName()}") print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:]))) return loadedplugins if __name__ == '__main__': load_venue_plugins() ## Instruction: Enable On the rocks plugin ## Code After: import os import pkgutil import sys def load_venue_plugins(): """ Read plugin directory and load found plugins. Variable "blacklisted" can be used to exclude loading certain plugins. """ blacklisted = ["plugin_tiketti"] foundblacklisted = list() loadedplugins = list() pluginspathabs = os.path.join(os.path.dirname(__file__), "venues") for loader, plugname, ispkg in \ pkgutil.iter_modules(path = [pluginspathabs]): if plugname in sys.modules: continue if plugname in blacklisted: foundblacklisted.append(plugname.lstrip("plugin_")) continue plugpath = "venues.%s" % (plugname) loadplug = __import__(plugpath, fromlist = [plugname]) classname = plugname.split("_")[1].title() loadedclass = getattr(loadplug, classname) instance = loadedclass() loadedplugins.append(instance) print(f"Loaded plugin: {instance.getVenueName()}") print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:]))) return loadedplugins if __name__ == '__main__': load_venue_plugins()
... Read plugin directory and load found plugins. Variable "blacklisted" can be used to exclude loading certain plugins. """ blacklisted = ["plugin_tiketti"] foundblacklisted = list() loadedplugins = list() pluginspathabs = os.path.join(os.path.dirname(__file__), "venues") ...
70a97ab38d2b30652c41d1e058ef4447fdd54863
test_settings.py
test_settings.py
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 USE_TZ = True
Fix Django 5.0 deprecation warning.
Fix Django 5.0 deprecation warning.
Python
mit
adamcharnock/django-tz-detect,adamcharnock/django-tz-detect,adamcharnock/django-tz-detect
python
## Code Before: import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 ## Instruction: Fix Django 5.0 deprecation warning. ## Code After: import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 USE_TZ = True
# ... existing code ... MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 USE_TZ = True # ... rest of the code ...
1599d4ed14fb3d7c7e551c9f6ce3f86d9df17cbd
mammoth/writers/html.py
mammoth/writers/html.py
from __future__ import unicode_literals from .abc import Writer import cgi class HtmlWriter(Writer): def __init__(self): self._fragments = [] def text(self, text): self._fragments.append(_escape_html(text)) def start(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1}>".format(name, attribute_string)) def end(self, name): self._fragments.append("</{0}>".format(name)) def self_closing(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1} />".format(name, attribute_string)) def append(self, html): self._fragments.append(html) def as_string(self): return "".join(self._fragments) def _escape_html(text): return cgi.escape(text, quote=True) def _generate_attribute_string(attributes): if attributes is None: return "" else: return "".join( ' {0}="{1}"'.format(key, _escape_html(attributes[key])) for key in sorted(attributes) )
from __future__ import unicode_literals from xml.sax.saxutils import escape from .abc import Writer class HtmlWriter(Writer): def __init__(self): self._fragments = [] def text(self, text): self._fragments.append(_escape_html(text)) def start(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1}>".format(name, attribute_string)) def end(self, name): self._fragments.append("</{0}>".format(name)) def self_closing(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1} />".format(name, attribute_string)) def append(self, html): self._fragments.append(html) def as_string(self): return "".join(self._fragments) def _escape_html(text): return escape(text, {'"': "&quot;"}) def _generate_attribute_string(attributes): if attributes is None: return "" else: return "".join( ' {0}="{1}"'.format(key, _escape_html(attributes[key])) for key in sorted(attributes) )
Use xml.sax.saxutils.escape instead of deprecated cgi.escape
Use xml.sax.saxutils.escape instead of deprecated cgi.escape ``` /usr/local/lib/python3.6/dist-packages/mammoth/writers/html.py:34: DeprecationWarning: cgi.escape is deprecated, use html.escape instead return cgi.escape(text, quote=True) ```
Python
bsd-2-clause
mwilliamson/python-mammoth
python
## Code Before: from __future__ import unicode_literals from .abc import Writer import cgi class HtmlWriter(Writer): def __init__(self): self._fragments = [] def text(self, text): self._fragments.append(_escape_html(text)) def start(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1}>".format(name, attribute_string)) def end(self, name): self._fragments.append("</{0}>".format(name)) def self_closing(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1} />".format(name, attribute_string)) def append(self, html): self._fragments.append(html) def as_string(self): return "".join(self._fragments) def _escape_html(text): return cgi.escape(text, quote=True) def _generate_attribute_string(attributes): if attributes is None: return "" else: return "".join( ' {0}="{1}"'.format(key, _escape_html(attributes[key])) for key in sorted(attributes) ) ## Instruction: Use xml.sax.saxutils.escape instead of deprecated cgi.escape ``` /usr/local/lib/python3.6/dist-packages/mammoth/writers/html.py:34: DeprecationWarning: cgi.escape is deprecated, use html.escape instead return cgi.escape(text, quote=True) ``` ## Code After: from __future__ import unicode_literals from xml.sax.saxutils import escape from .abc import Writer class HtmlWriter(Writer): def __init__(self): self._fragments = [] def text(self, text): self._fragments.append(_escape_html(text)) def start(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1}>".format(name, attribute_string)) def end(self, name): self._fragments.append("</{0}>".format(name)) def self_closing(self, name, attributes=None): attribute_string = _generate_attribute_string(attributes) self._fragments.append("<{0}{1} />".format(name, attribute_string)) def append(self, html): self._fragments.append(html) def as_string(self): return "".join(self._fragments) def _escape_html(text): return escape(text, {'"': "&quot;"}) def _generate_attribute_string(attributes): if attributes is None: return "" else: return "".join( ' {0}="{1}"'.format(key, _escape_html(attributes[key])) for key in sorted(attributes) )
# ... existing code ... from __future__ import unicode_literals from xml.sax.saxutils import escape from .abc import Writer class HtmlWriter(Writer): # ... modified code ... def _escape_html(text): return escape(text, {'"': "&quot;"}) def _generate_attribute_string(attributes): # ... rest of the code ...
c9f7c36e8f6bdf32a39f1ac8cd9ebbcf80df4a20
saleor/dashboard/category/forms.py
saleor/dashboard/category/forms.py
from django import forms from django.utils.text import slugify from django.utils.translation import ugettext_lazy as _ from mptt.forms import TreeNodeChoiceField from unidecode import unidecode from ...product.models import Category class CategoryForm(forms.ModelForm): parent = TreeNodeChoiceField(queryset=Category.objects.all(), required=False) class Meta: model = Category exclude = ['slug'] def clean_parent(self): parent = self.cleaned_data['parent'] if parent: if parent == self.instance: raise forms.ValidationError(_('A category may not be made a child of itself')) if self.instance in parent.get_ancestors(): raise forms.ValidationError(_('A category may not be made a child of any of its descendants.')) return parent def save(self, commit=True): self.instance.slug = slugify(unidecode(self.instance.name)) self.instance.set_hidden_descendants(self.cleaned_data['hidden']) return super(CategoryForm, self).save(commit=commit)
from django import forms from django.utils.text import slugify from django.utils.translation import ugettext_lazy as _ from mptt.forms import TreeNodeChoiceField from unidecode import unidecode from ...product.models import Category class CategoryForm(forms.ModelForm): parent = TreeNodeChoiceField(queryset=Category.objects.all(), required=False) class Meta: model = Category exclude = ['slug'] def clean_parent(self): parent = self.cleaned_data['parent'] if parent: if parent == self.instance: raise forms.ValidationError(_('A category may not be made a child of itself')) if self.instance in parent.get_ancestors(): raise forms.ValidationError(_('A category may not be made a child of any of its descendants.')) return parent def save(self, commit=True): self.instance.slug = slugify(unidecode(self.instance.name)) super(CategoryForm, self).save(commit=commit) self.instance.set_hidden_descendants(self.cleaned_data['hidden']) return self.instance
Fix getting category's descendants before save
Fix getting category's descendants before save
Python
bsd-3-clause
taedori81/saleor,laosunhust/saleor,taedori81/saleor,josesanch/saleor,laosunhust/saleor,maferelo/saleor,rodrigozn/CW-Shop,josesanch/saleor,paweltin/saleor,tfroehlich82/saleor,car3oon/saleor,arth-co/saleor,rodrigozn/CW-Shop,avorio/saleor,paweltin/saleor,maferelo/saleor,KenMutemi/saleor,rchav/vinerack,itbabu/saleor,avorio/saleor,arth-co/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,dashmug/saleor,KenMutemi/saleor,josesanch/saleor,jreigel/saleor,UITools/saleor,jreigel/saleor,itbabu/saleor,Drekscott/Motlaesaleor,mociepka/saleor,UITools/saleor,laosunhust/saleor,spartonia/saleor,rodrigozn/CW-Shop,taedori81/saleor,tfroehlich82/saleor,arth-co/saleor,paweltin/saleor,maferelo/saleor,spartonia/saleor,UITools/saleor,Drekscott/Motlaesaleor,car3oon/saleor,avorio/saleor,laosunhust/saleor,car3oon/saleor,spartonia/saleor,taedori81/saleor,rchav/vinerack,spartonia/saleor,UITools/saleor,itbabu/saleor,Drekscott/Motlaesaleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,dashmug/saleor,Drekscott/Motlaesaleor,paweltin/saleor,avorio/saleor,arth-co/saleor,dashmug/saleor,jreigel/saleor,rchav/vinerack,mociepka/saleor
python
## Code Before: from django import forms from django.utils.text import slugify from django.utils.translation import ugettext_lazy as _ from mptt.forms import TreeNodeChoiceField from unidecode import unidecode from ...product.models import Category class CategoryForm(forms.ModelForm): parent = TreeNodeChoiceField(queryset=Category.objects.all(), required=False) class Meta: model = Category exclude = ['slug'] def clean_parent(self): parent = self.cleaned_data['parent'] if parent: if parent == self.instance: raise forms.ValidationError(_('A category may not be made a child of itself')) if self.instance in parent.get_ancestors(): raise forms.ValidationError(_('A category may not be made a child of any of its descendants.')) return parent def save(self, commit=True): self.instance.slug = slugify(unidecode(self.instance.name)) self.instance.set_hidden_descendants(self.cleaned_data['hidden']) return super(CategoryForm, self).save(commit=commit) ## Instruction: Fix getting category's descendants before save ## Code After: from django import forms from django.utils.text import slugify from django.utils.translation import ugettext_lazy as _ from mptt.forms import TreeNodeChoiceField from unidecode import unidecode from ...product.models import Category class CategoryForm(forms.ModelForm): parent = TreeNodeChoiceField(queryset=Category.objects.all(), required=False) class Meta: model = Category exclude = ['slug'] def clean_parent(self): parent = self.cleaned_data['parent'] if parent: if parent == self.instance: raise forms.ValidationError(_('A category may not be made a child of itself')) if self.instance in parent.get_ancestors(): raise forms.ValidationError(_('A category may not be made a child of any of its descendants.')) return parent def save(self, commit=True): self.instance.slug = slugify(unidecode(self.instance.name)) super(CategoryForm, self).save(commit=commit) self.instance.set_hidden_descendants(self.cleaned_data['hidden']) return self.instance
... def save(self, commit=True): self.instance.slug = slugify(unidecode(self.instance.name)) super(CategoryForm, self).save(commit=commit) self.instance.set_hidden_descendants(self.cleaned_data['hidden']) return self.instance ...
9a7643d72aec82f0999da29019cc4aab0ab445b4
datasource/datasource-war/src/main/java/org/wildfly/swarm/examples/ds/war/MyResource.java
datasource/datasource-war/src/main/java/org/wildfly/swarm/examples/ds/war/MyResource.java
package org.wildfly.swarm.examples.ds.war; import java.sql.Connection; import java.sql.SQLException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; /** * @author Bob McWhirter */ @Path("/") public class MyResource { @GET @Produces("text/plain") public String get() throws NamingException, SQLException { Context ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup("jboss/datasources/ExampleDS"); Connection conn = ds.getConnection(); try { return "Howdy using connection: " + conn; } finally { conn.close(); } } }
package org.wildfly.swarm.examples.ds.war; import java.sql.Connection; import java.sql.SQLException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; /** * @author Bob McWhirter */ @Path("/") public class MyResource { @GET @Produces("text/plain") public String get() throws NamingException, SQLException { Context ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup("jboss/datasources/ExampleDS"); Connection conn = ds.getConnection(); try { return "Howdy using driver: " + conn.getMetaData().getDriverName(); } finally { conn.close(); } } }
Modify datasource example to actually show driver name
Modify datasource example to actually show driver name
Java
apache-2.0
wildfly-swarm/wildfly-swarm-examples,wildfly-swarm/wildfly-swarm-examples,emag/wildfly-swarm-examples,mstahv/wildfly-swarm-examples,wildfly-swarm/wildfly-swarm-examples,emag/wildfly-swarm-examples,wildfly-swarm/wildfly-swarm-examples,wildfly-swarm/wildfly-swarm-examples,mstahv/wildfly-swarm-examples,mstahv/wildfly-swarm-examples,emag/wildfly-swarm-examples,mstahv/wildfly-swarm-examples,emag/wildfly-swarm-examples,emag/wildfly-swarm-examples,wildfly-swarm/wildfly-swarm-examples,emag/wildfly-swarm-examples
java
## Code Before: package org.wildfly.swarm.examples.ds.war; import java.sql.Connection; import java.sql.SQLException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; /** * @author Bob McWhirter */ @Path("/") public class MyResource { @GET @Produces("text/plain") public String get() throws NamingException, SQLException { Context ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup("jboss/datasources/ExampleDS"); Connection conn = ds.getConnection(); try { return "Howdy using connection: " + conn; } finally { conn.close(); } } } ## Instruction: Modify datasource example to actually show driver name ## Code After: package org.wildfly.swarm.examples.ds.war; import java.sql.Connection; import java.sql.SQLException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; /** * @author Bob McWhirter */ @Path("/") public class MyResource { @GET @Produces("text/plain") public String get() throws NamingException, SQLException { Context ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup("jboss/datasources/ExampleDS"); Connection conn = ds.getConnection(); try { return "Howdy using driver: " + conn.getMetaData().getDriverName(); } finally { conn.close(); } } }
... DataSource ds = (DataSource) ctx.lookup("jboss/datasources/ExampleDS"); Connection conn = ds.getConnection(); try { return "Howdy using driver: " + conn.getMetaData().getDriverName(); } finally { conn.close(); } ...
e27f04e9c8d5d74afdd9cd7d6990cad5ff6f6cb5
api/v330/docking_event/serializers.py
api/v330/docking_event/serializers.py
from api.v330.common.serializers import * class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): spacecraft = SpacecraftSerializer(read_only=True, many=False) class Meta: model = SpacecraftFlight fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft') class DockingEventSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location') class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
from api.v330.common.serializers import * class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): spacecraft = SpacecraftSerializer(read_only=True, many=False) class Meta: model = SpacecraftFlight fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft') class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): class Meta: model = SpaceStation fields = ('id', 'url', 'name', 'image_url') class DockingEventSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location') class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False) docking_location = serializers.StringRelatedField(many=False, read_only=True) space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
Add space_station field to detailed docking event
Add space_station field to detailed docking event
Python
apache-2.0
ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server
python
## Code Before: from api.v330.common.serializers import * class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): spacecraft = SpacecraftSerializer(read_only=True, many=False) class Meta: model = SpacecraftFlight fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft') class DockingEventSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location') class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location') ## Instruction: Add space_station field to detailed docking event ## Code After: from api.v330.common.serializers import * class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): spacecraft = SpacecraftSerializer(read_only=True, many=False) class Meta: model = SpacecraftFlight fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft') class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): class Meta: model = SpaceStation fields = ('id', 'url', 'name', 'image_url') class DockingEventSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True) docking_location = serializers.StringRelatedField(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location') class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False) docking_location = serializers.StringRelatedField(many=False, read_only=True) space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
# ... existing code ... class Meta: model = SpacecraftFlight fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft') class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer): class Meta: model = SpaceStation fields = ('id', 'url', 'name', 'image_url') class DockingEventSerializer(serializers.HyperlinkedModelSerializer): # ... modified code ... class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer): flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False) docking_location = serializers.StringRelatedField(many=False, read_only=True) space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True) class Meta: model = DockingEvent fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station') # ... rest of the code ...
ab73b2132825e9415ff24306a9d89da10294d79e
icekit/utils/management/base.py
icekit/utils/management/base.py
import time from django import db from django.core.management.base import BaseCommand from optparse import make_option class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() self.stdout.write('Sleeping for %s min.' % options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: self.stdout.write('Closing database connection: %s' % alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() logger.info('Sleeping for %s min.', options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: logger.info('Closing database connection: %s', alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
Use `logging` instead of printing to stdout by default.
Use `logging` instead of printing to stdout by default.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
python
## Code Before: import time from django import db from django.core.management.base import BaseCommand from optparse import make_option class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() self.stdout.write('Sleeping for %s min.' % options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: self.stdout.write('Closing database connection: %s' % alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method') ## Instruction: Use `logging` instead of printing to stdout by default. ## Code After: import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() logger.info('Sleeping for %s min.', options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: logger.info('Closing database connection: %s', alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
// ... existing code ... import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): // ... modified code ... while True: self.task(*args, **options) self.cleanup() logger.info('Sleeping for %s min.', options['interval']) time.sleep(60 * options['interval']) def cleanup(self): ... # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: logger.info('Closing database connection: %s', alias) db.connections[alias].close() def task(self, *args, **options): // ... rest of the code ...
542ab347d5a0195c88be212d966253d4385b8af5
api.py
api.py
import webapp2 from google.appengine.api import channel from google.appengine.api import users open_channels = set() class ChannelDidConnect(webapp2.RequestHandler): def post(self): print "Got connection" open_channels.add(self.request.get("from")) class ChannelDisconnect(webapp2.RequestHandler): def post(self): print "Got disconnection" channelId = self.request.get("from") if channelId in open_channels: open_channels.remove(channelId) class ChannelRequest(webapp2.RequestHandler): def get(self): user = users.get_current_user() if not user: self.response.write({"token": ""}) return token = channel.create_channel(user.user_id()) self.response.write( "{\"token\": \"%s\"}" % token ) class Message(webapp2.RequestHandler): def post(self): # Only accept messages from logged in users user = users.get_current_user() if not user: return print open_channels for channelId in open_channels: channel.send_message(channelId, "message=%s&author=%s" % (self.request.POST["message"], self.request.POST["author"])) app = webapp2.WSGIApplication([ ('/api/channel', ChannelRequest), ('/api/message', Message), ('/_ah/channel/connected/', ChannelDidConnect), ('/_ah/channel/disconnected/', ChannelDisconnect), ])
import webapp2 from google.appengine.api import channel from google.appengine.api import users open_channels = set() class ChannelDidConnect(webapp2.RequestHandler): def post(self): print "Got connection" open_channels.add(self.request.get("from")) class ChannelDisconnect(webapp2.RequestHandler): def post(self): print "Got disconnection" channelId = self.request.get("from") if channelId in open_channels: open_channels.remove(channelId) class ChannelRequest(webapp2.RequestHandler): def get(self): user = users.get_current_user() if not user: self.response.write({"token": ""}) return token = channel.create_channel(user.user_id()) self.response.write( "{\"token\": \"%s\"}" % token ) class Message(webapp2.RequestHandler): def post(self): self.handleRequest() def get(self): self.handleRequest() def handleRequest(self): print open_channels for channelId in open_channels: channel.send_message(channelId, "message=%s&author=%s" % (self.request.params["message"], self.request.params["author"])) app = webapp2.WSGIApplication([ ('/api/channel', ChannelRequest), ('/api/message', Message), ('/_ah/channel/connected/', ChannelDidConnect), ('/_ah/channel/disconnected/', ChannelDisconnect), ])
Allow messages to come in through GET
Allow messages to come in through GET
Python
mit
misterwilliam/gae-channels-sample,misterwilliam/gae-channels-sample,misterwilliam/gae-channels-sample
python
## Code Before: import webapp2 from google.appengine.api import channel from google.appengine.api import users open_channels = set() class ChannelDidConnect(webapp2.RequestHandler): def post(self): print "Got connection" open_channels.add(self.request.get("from")) class ChannelDisconnect(webapp2.RequestHandler): def post(self): print "Got disconnection" channelId = self.request.get("from") if channelId in open_channels: open_channels.remove(channelId) class ChannelRequest(webapp2.RequestHandler): def get(self): user = users.get_current_user() if not user: self.response.write({"token": ""}) return token = channel.create_channel(user.user_id()) self.response.write( "{\"token\": \"%s\"}" % token ) class Message(webapp2.RequestHandler): def post(self): # Only accept messages from logged in users user = users.get_current_user() if not user: return print open_channels for channelId in open_channels: channel.send_message(channelId, "message=%s&author=%s" % (self.request.POST["message"], self.request.POST["author"])) app = webapp2.WSGIApplication([ ('/api/channel', ChannelRequest), ('/api/message', Message), ('/_ah/channel/connected/', ChannelDidConnect), ('/_ah/channel/disconnected/', ChannelDisconnect), ]) ## Instruction: Allow messages to come in through GET ## Code After: import webapp2 from google.appengine.api import channel from google.appengine.api import users open_channels = set() class ChannelDidConnect(webapp2.RequestHandler): def post(self): print "Got connection" open_channels.add(self.request.get("from")) class ChannelDisconnect(webapp2.RequestHandler): def post(self): print "Got disconnection" channelId = self.request.get("from") if channelId in open_channels: open_channels.remove(channelId) class ChannelRequest(webapp2.RequestHandler): def get(self): user = users.get_current_user() if not user: self.response.write({"token": ""}) return token = channel.create_channel(user.user_id()) self.response.write( "{\"token\": \"%s\"}" % token ) class Message(webapp2.RequestHandler): def post(self): self.handleRequest() def get(self): self.handleRequest() def handleRequest(self): print open_channels for channelId in open_channels: channel.send_message(channelId, "message=%s&author=%s" % (self.request.params["message"], self.request.params["author"])) app = webapp2.WSGIApplication([ ('/api/channel', ChannelRequest), ('/api/message', Message), ('/_ah/channel/connected/', ChannelDidConnect), ('/_ah/channel/disconnected/', ChannelDisconnect), ])
... "{\"token\": \"%s\"}" % token ) class Message(webapp2.RequestHandler): def post(self): self.handleRequest() def get(self): self.handleRequest() def handleRequest(self): print open_channels for channelId in open_channels: channel.send_message(channelId, "message=%s&author=%s" % (self.request.params["message"], self.request.params["author"])) app = webapp2.WSGIApplication([ ...
258df4932fe937c0baf45d30de88c194f7f7718a
conftest.py
conftest.py
import numba import numpy import pkg_resources import pytest # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: NumPy {}, numba {}'.format( numpy.__version__, numba.__version__ )
import numba import numpy import pkg_resources import pytest import scipy # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: Numba {}, NumPy {}, SciPy {}'.format( numba.__version__, numpy.__version__, scipy.__version__, )
Add SciPy version to pytest header
Add SciPy version to pytest header
Python
mit
dwillmer/fastats,fastats/fastats
python
## Code Before: import numba import numpy import pkg_resources import pytest # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: NumPy {}, numba {}'.format( numpy.__version__, numba.__version__ ) ## Instruction: Add SciPy version to pytest header ## Code After: import numba import numpy import pkg_resources import pytest import scipy # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: Numba {}, NumPy {}, SciPy {}'.format( numba.__version__, numpy.__version__, scipy.__version__, )
... import numpy import pkg_resources import pytest import scipy # The first version of numpy that broke backwards compat and improved printing. ... def pytest_report_header(config): return 'Testing fastats using: Numba {}, NumPy {}, SciPy {}'.format( numba.__version__, numpy.__version__, scipy.__version__, ) ...
ab8d715f67f1521bae7d609c5635df23fbce9641
app/src/main/java/com/example/android/androidsimulator/activities/SelectedMessageContact.java
app/src/main/java/com/example/android/androidsimulator/activities/SelectedMessageContact.java
package com.example.android.androidsimulator.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import com.example.android.androidsimulator.R; public class SelectedMessageContact extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_selected_message_contact); } }
package com.example.android.androidsimulator.activities; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.example.android.androidsimulator.R; import com.example.android.androidsimulator.data.Messages; import java.util.ArrayList; public class SelectedMessageContact extends AppCompatActivity { SharedPreferences preferences; SharedPreferences.Editor editor; ArrayList<Messages> messages; Button sendMessageButton; EditText contentMessage; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_selected_message_contact); contentMessage = (EditText) findViewById(R.id.message_editText); sendMessageButton = (Button) findViewById(R.id.sendMessage_button); sendMessageButton.setEnabled(false); // setEvents setEvents(); } private void setEvents() { // enable button of sendMessage contentMessage.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { if (contentMessage.length() > 0) { sendMessageButton.setEnabled(true); } else { sendMessageButton.setEnabled(false); } } @Override public void afterTextChanged(Editable editable) { } }); // button send message sendMessageButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { sendMessage(); } }); } private void sendMessage() { } }
Add method of sendMessageButton - Enabled/Disabled
Add method of sendMessageButton - Enabled/Disabled
Java
mit
malucs-developer/Android-Simulator
java
## Code Before: package com.example.android.androidsimulator.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import com.example.android.androidsimulator.R; public class SelectedMessageContact extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_selected_message_contact); } } ## Instruction: Add method of sendMessageButton - Enabled/Disabled ## Code After: package com.example.android.androidsimulator.activities; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.example.android.androidsimulator.R; import com.example.android.androidsimulator.data.Messages; import java.util.ArrayList; public class SelectedMessageContact extends AppCompatActivity { SharedPreferences preferences; SharedPreferences.Editor editor; ArrayList<Messages> messages; Button sendMessageButton; EditText contentMessage; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_selected_message_contact); contentMessage = (EditText) findViewById(R.id.message_editText); sendMessageButton = (Button) findViewById(R.id.sendMessage_button); sendMessageButton.setEnabled(false); // setEvents setEvents(); } private void setEvents() { // enable button of sendMessage contentMessage.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { if (contentMessage.length() > 0) { sendMessageButton.setEnabled(true); } else { sendMessageButton.setEnabled(false); } } @Override public void afterTextChanged(Editable editable) { } }); // button send message sendMessageButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { sendMessage(); } }); } private void sendMessage() { } }
... package com.example.android.androidsimulator.activities; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.example.android.androidsimulator.R; import com.example.android.androidsimulator.data.Messages; import java.util.ArrayList; public class SelectedMessageContact extends AppCompatActivity { SharedPreferences preferences; SharedPreferences.Editor editor; ArrayList<Messages> messages; Button sendMessageButton; EditText contentMessage; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_selected_message_contact); contentMessage = (EditText) findViewById(R.id.message_editText); sendMessageButton = (Button) findViewById(R.id.sendMessage_button); sendMessageButton.setEnabled(false); // setEvents setEvents(); } private void setEvents() { // enable button of sendMessage contentMessage.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { if (contentMessage.length() > 0) { sendMessageButton.setEnabled(true); } else { sendMessageButton.setEnabled(false); } } @Override public void afterTextChanged(Editable editable) { } }); // button send message sendMessageButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { sendMessage(); } }); } private void sendMessage() { } } ...
bddab649c6684f09870983dca97c39eb30b62c06
djangobotcfg/status.py
djangobotcfg/status.py
from buildbot.status import html, words from buildbot.status.web.authz import Authz from buildbot.status.web.auth import BasicAuth # authz = Authz( # forceBuild=True, # forceAllBuilds=True, # pingBuilder=True, # gracefulShutdown=True, # stopBuild=True, # stopAllBuilds=True, # cancelPendingBuild=True, # cleanShutdown=True, # ) def get_status(): return [ html.WebStatus( http_port = '8010', # authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), words.IRC( host = 'irc.freenode.net', channels = ['#revsys'], nick = 'djangobuilds', notify_events = { 'successToFailure': True, 'failureToSuccess': True, } ) ]
from buildbot.status import html, words from buildbot.status.web.authz import Authz from buildbot.status.web.auth import BasicAuth def get_status(): return [ html.WebStatus( http_port = '8010', # authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), ]
Remove the IRC bot for now, and also the commented-out code.
Remove the IRC bot for now, and also the commented-out code.
Python
bsd-3-clause
hochanh/django-buildmaster,jacobian-archive/django-buildmaster
python
## Code Before: from buildbot.status import html, words from buildbot.status.web.authz import Authz from buildbot.status.web.auth import BasicAuth # authz = Authz( # forceBuild=True, # forceAllBuilds=True, # pingBuilder=True, # gracefulShutdown=True, # stopBuild=True, # stopAllBuilds=True, # cancelPendingBuild=True, # cleanShutdown=True, # ) def get_status(): return [ html.WebStatus( http_port = '8010', # authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), words.IRC( host = 'irc.freenode.net', channels = ['#revsys'], nick = 'djangobuilds', notify_events = { 'successToFailure': True, 'failureToSuccess': True, } ) ] ## Instruction: Remove the IRC bot for now, and also the commented-out code. ## Code After: from buildbot.status import html, words from buildbot.status.web.authz import Authz from buildbot.status.web.auth import BasicAuth def get_status(): return [ html.WebStatus( http_port = '8010', # authz = authz, order_console_by_time = True, revlink = 'http://code.djangoproject.com/changeset/%s', changecommentlink = ( r'\b#(\d+)\b', r'http://code.djangoproject.com/ticket/\1', r'Ticket \g<0>' ) ), ]
... from buildbot.status import html, words from buildbot.status.web.authz import Authz from buildbot.status.web.auth import BasicAuth def get_status(): return [ ... r'Ticket \g<0>' ) ), ] ...
574b4d95a48f4df676ed5f23f0c83a9df2bc241d
pydux/log_middleware.py
pydux/log_middleware.py
def log_middleware(store): """log all actions to console as they are dispatched""" def wrapper(next_): def log_dispatch(action): print('Dispatch Action:', action) return next_(action) return log_dispatch return wrapper
from __future__ import print_function """ logging middleware example """ def log_middleware(store): """log all actions to console as they are dispatched""" def wrapper(next_): def log_dispatch(action): print('Dispatch Action:', action) return next_(action) return log_dispatch return wrapper
Use from __future__ import for print function
Use from __future__ import for print function
Python
mit
usrlocalben/pydux
python
## Code Before: def log_middleware(store): """log all actions to console as they are dispatched""" def wrapper(next_): def log_dispatch(action): print('Dispatch Action:', action) return next_(action) return log_dispatch return wrapper ## Instruction: Use from __future__ import for print function ## Code After: from __future__ import print_function """ logging middleware example """ def log_middleware(store): """log all actions to console as they are dispatched""" def wrapper(next_): def log_dispatch(action): print('Dispatch Action:', action) return next_(action) return log_dispatch return wrapper
// ... existing code ... from __future__ import print_function """ logging middleware example """ def log_middleware(store): """log all actions to console as they are dispatched""" // ... rest of the code ...
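The point of the `from __future__ import print_function` line in this record is that it turns `print` into an ordinary function under Python 2, so middleware like this behaves the same on both interpreter lines. A minimal sketch of why that matters; the stderr variant below is an illustration of mine, not something from the pydux repo:

```python
from __future__ import print_function
import sys


def log_middleware(store):
    """Log every dispatched action to stderr before passing it along."""
    def wrapper(next_):
        def log_dispatch(action):
            # Without the __future__ import this line is a SyntaxError on
            # Python 2, where print is a statement and file= is not valid.
            print('Dispatch Action:', action, file=sys.stderr)
            return next_(action)
        return log_dispatch
    return wrapper
```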
94e3572a4049b0eb0ff0d762a3bce5248a5bd507
src/sas/sasgui/perspectives/file_converter/file_converter.py
src/sas/sasgui/perspectives/file_converter/file_converter.py
import logging from sas.sasgui.guiframe.plugin_base import PluginBase from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow logger = logging.getLogger(__name__) class Plugin(PluginBase): """ This class defines the interface for a Plugin class for File Converter perspective """ def __init__(self): PluginBase.__init__(self, name="File Converter") logger.info("File Converter plug-in started") self._sub_menu = "Tool" self.converter_frame = None def get_tools(self): """ Returns a set of menu entries """ help_txt = "Convert single column ASCII data to CanSAS format" return [("File Converter", help_txt, self.on_file_converter)] def on_file_converter(self, event): if self.converter_frame is None: frame = ConverterWindow(parent=self.parent, base=self.parent, manager=self) self.put_icon(frame) self.converter_frame = frame else: self.converter_frame.Show(False) self.converter_frame.Show(True) def put_icon(self, frame): """ Put icon in the frame title bar """ if hasattr(frame, "IsIconized"): if not frame.IsIconized(): try: icon = self.parent.GetIcon() frame.SetIcon(icon) except: pass
import logging from sas.sasgui.guiframe.plugin_base import PluginBase from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow logger = logging.getLogger(__name__) class Plugin(PluginBase): """ This class defines the interface for a Plugin class for File Converter perspective """ def __init__(self): PluginBase.__init__(self, name="File Converter") logger.info("File Converter plug-in started") self._sub_menu = "Tool" self.converter_frame = None def get_tools(self): """ Returns a set of menu entries """ help_txt = "Convert ASCII or BSL/OTOKO data to CanSAS or NXcanSAS formats" return [("File Converter", help_txt, self.on_file_converter)] def on_file_converter(self, event): if self.converter_frame is None: frame = ConverterWindow(parent=self.parent, base=self.parent, manager=self) self.put_icon(frame) self.converter_frame = frame else: self.converter_frame.Show(False) self.converter_frame.Show(True) def put_icon(self, frame): """ Put icon in the frame title bar """ if hasattr(frame, "IsIconized"): if not frame.IsIconized(): try: icon = self.parent.GetIcon() frame.SetIcon(icon) except: pass
Update file converter tooltip in tools menu
Update file converter tooltip in tools menu
Python
bsd-3-clause
SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview
python
## Code Before: import logging from sas.sasgui.guiframe.plugin_base import PluginBase from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow logger = logging.getLogger(__name__) class Plugin(PluginBase): """ This class defines the interface for a Plugin class for File Converter perspective """ def __init__(self): PluginBase.__init__(self, name="File Converter") logger.info("File Converter plug-in started") self._sub_menu = "Tool" self.converter_frame = None def get_tools(self): """ Returns a set of menu entries """ help_txt = "Convert single column ASCII data to CanSAS format" return [("File Converter", help_txt, self.on_file_converter)] def on_file_converter(self, event): if self.converter_frame is None: frame = ConverterWindow(parent=self.parent, base=self.parent, manager=self) self.put_icon(frame) self.converter_frame = frame else: self.converter_frame.Show(False) self.converter_frame.Show(True) def put_icon(self, frame): """ Put icon in the frame title bar """ if hasattr(frame, "IsIconized"): if not frame.IsIconized(): try: icon = self.parent.GetIcon() frame.SetIcon(icon) except: pass ## Instruction: Update file converter tooltip in tools menu ## Code After: import logging from sas.sasgui.guiframe.plugin_base import PluginBase from sas.sasgui.perspectives.file_converter.converter_panel import ConverterWindow logger = logging.getLogger(__name__) class Plugin(PluginBase): """ This class defines the interface for a Plugin class for File Converter perspective """ def __init__(self): PluginBase.__init__(self, name="File Converter") logger.info("File Converter plug-in started") self._sub_menu = "Tool" self.converter_frame = None def get_tools(self): """ Returns a set of menu entries """ help_txt = "Convert ASCII or BSL/OTOKO data to CanSAS or NXcanSAS formats" return [("File Converter", help_txt, self.on_file_converter)] def on_file_converter(self, event): if self.converter_frame is None: frame = ConverterWindow(parent=self.parent, base=self.parent, manager=self) self.put_icon(frame) self.converter_frame = frame else: self.converter_frame.Show(False) self.converter_frame.Show(True) def put_icon(self, frame): """ Put icon in the frame title bar """ if hasattr(frame, "IsIconized"): if not frame.IsIconized(): try: icon = self.parent.GetIcon() frame.SetIcon(icon) except: pass
# ... existing code ... """ Returns a set of menu entries """ help_txt = "Convert ASCII or BSL/OTOKO data to CanSAS or NXcanSAS formats" return [("File Converter", help_txt, self.on_file_converter)] def on_file_converter(self, event): # ... rest of the code ...
0498778db28fd2e2272b48fb84a99eece7b662ff
autocorrect.py
autocorrect.py
wordFile = open("words.txt") threshold = 8 listOfWords = input().split() index = 0 def lev(a, b): if min(len(a), len(b)) == 0: return max(len(a), len(b)) else: return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1, lev(a[:-1], b[:-1]) + int(not a == b)) for x in listOfWords: replacement = (x, threshold + 1) for word in wordFile: x = x.lower() word = word[:-1].lower() if x == word: replacement = (x, 0) break # Some words may actually be spelled correctly! d = lev(x, word) if (d < threshold) and (replacement[1] > d): replacement = (word, d) listOfWords[index] = replacement[0] index += 1 print(*listOfWords)
wordFile = open("words.txt") threshold = 8 listOfWords = input().split() index = 0 # Compute Levenshtein distance def lev(a, b): if min(len(a), len(b)) == 0: return max(len(a), len(b)) elif len(a) == len(b): # Use Hamming Distance (special case) return sum(x != y for x, y in zip(a, b)) else: return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1, lev(a[:-1], b[:-1]) + int(not a[-1] == b[-1])) for x in listOfWords: replacement = (x, threshold + 1) for word in wordFile: x = x.lower() word = word[:-1].lower() if x == word: replacement = (x, 0) break # Some words may actually be spelled correctly! d = lev(x, word) if (d < threshold) and (replacement[1] > d): replacement = (word, d) listOfWords[index] = replacement[0] index += 1 wordFile.seek(0) print(*listOfWords)
Use Hamming distance for efficiency
Use Hamming distance for efficiency Hamming distance is faster when strings are of same length (Hamming is a special case of Levenshtein).
Python
mit
jmanuel1/spellingbee
python
## Code Before: wordFile = open("words.txt") threshold = 8 listOfWords = input().split() index = 0 def lev(a, b): if min(len(a), len(b)) == 0: return max(len(a), len(b)) else: return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1, lev(a[:-1], b[:-1]) + int(not a == b)) for x in listOfWords: replacement = (x, threshold + 1) for word in wordFile: x = x.lower() word = word[:-1].lower() if x == word: replacement = (x, 0) break # Some words may actually be spelled correctly! d = lev(x, word) if (d < threshold) and (replacement[1] > d): replacement = (word, d) listOfWords[index] = replacement[0] index += 1 print(*listOfWords) ## Instruction: Use Hamming distance for efficiency Hamming distance is faster when strings are of same length (Hamming is a special case of Levenshtein). ## Code After: wordFile = open("words.txt") threshold = 8 listOfWords = input().split() index = 0 # Compute Levenshtein distance def lev(a, b): if min(len(a), len(b)) == 0: return max(len(a), len(b)) elif len(a) == len(b): # Use Hamming Distance (special case) return sum(x != y for x, y in zip(a, b)) else: return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1, lev(a[:-1], b[:-1]) + int(not a[-1] == b[-1])) for x in listOfWords: replacement = (x, threshold + 1) for word in wordFile: x = x.lower() word = word[:-1].lower() if x == word: replacement = (x, 0) break # Some words may actually be spelled correctly! d = lev(x, word) if (d < threshold) and (replacement[1] > d): replacement = (word, d) listOfWords[index] = replacement[0] index += 1 wordFile.seek(0) print(*listOfWords)
... listOfWords = input().split() index = 0 # Compute Levenshtein distance def lev(a, b): if min(len(a), len(b)) == 0: return max(len(a), len(b)) elif len(a) == len(b): # Use Hamming Distance (special case) return sum(x != y for x, y in zip(a, b)) else: return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1, lev(a[:-1], b[:-1]) + int(not a[-1] == b[-1])) for x in listOfWords: replacement = (x, threshold + 1) for word in wordFile: x = x.lower() word = word[:-1].lower() if x == word: replacement = (x, 0) break # Some words may actually be spelled correctly! d = lev(x, word) if (d < threshold) and (replacement[1] > d): replacement = (word, d) listOfWords[index] = replacement[0] index += 1 wordFile.seek(0) print(*listOfWords) ...
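The commit message's reasoning is that equal-length strings can be compared position by position instead of through the branching recursion. It is worth noting that the shortcut is an approximation: Hamming distance only counts substitutions, so for some equal-length pairs (shifted or rotated text) it overstates the true edit distance, a trade-off the script accepts for speed. A standalone sketch of the same idea, with function names of my own choosing:

```python
def hamming(a, b):
    """Number of positions at which two equal-length strings differ."""
    assert len(a) == len(b)
    return sum(x != y for x, y in zip(a, b))


def edit_distance(a, b):
    """Recursive Levenshtein distance with the equal-length shortcut."""
    if not a or not b:
        return max(len(a), len(b))
    if len(a) == len(b):
        # Substitution-only distance; an upper bound on true Levenshtein,
        # accepted here (as in the record above) in exchange for speed.
        return hamming(a, b)
    return min(edit_distance(a[:-1], b) + 1,
               edit_distance(a, b[:-1]) + 1,
               edit_distance(a[:-1], b[:-1]) + (a[-1] != b[-1]))


print(edit_distance('karolin', 'kathrin'))  # 3, via the Hamming fast path
```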
2a43183f5d2c14bacb92fe563d3c2ddf61b116da
tests/testMain.py
tests/testMain.py
import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXMLDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) if __name__ == '__main__': unittest.main()
import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': unittest.main()
Make naming consistent with our standard (camelcase always, even with acronymn)
Make naming consistent with our standard (camelcase always, even with acronyms)
Python
mpl-2.0
EsriOceans/btm
python
## Code Before: import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXMLDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) if __name__ == '__main__': unittest.main() ## Instruction: Make naming consistent with our standard (camelcase always, even with acronymn) ## Code After: import os import unittest import numpy import arcpy from utils import * # import our constants; # configure test data # XXX: use .ini files for these instead? used in other 'important' unit tests from config import * # import our local directory so we can use the internal modules import_paths = ['../Install/toolbox', '../Install'] addLocalPaths(import_paths) class TestBpiScript(unittest.TestCase): from scripts import bpi def testBpiImport(self, method=bpi): self.assertRaises(ValueError, method.main(), None) def testBpiRun(self): pass class TestStandardizeBpiGridsScript(unittest.TestCase): from scripts import standardize_bpi_grids def testStdImport(self, method=standardize_bpi_grids): pass def testStdRun(self): pass class TestBtmDocument(unittest.TestCase): # XXX this won't automatically get the right thing... how can we fix it? import utils def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': unittest.main()
// ... existing code ... # XXX this won't automatically get the right thing... how can we fix it? import utils def testXmlDocumentExists(self): self.assertTrue(os.path.exists(xml_doc)) def testCsvDocumentExists(self): self.assertTrue(os.path.exists(csv_doc)) if __name__ == '__main__': // ... rest of the code ...
9fe3e814e2a74e38fe960fab333b09cf8f00e1b0
setup.py
setup.py
import io from os import path from setuptools import setup this_directory = path.abspath(path.dirname(__file__)) with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() about = {} with io.open("striprtf/_version.py", "r", encoding="utf-8") as f: exec(f.read(), about) setup( name="striprtf", packages=["striprtf"], version=about["__version__"], description="A simple library to convert rtf to text", long_description=long_description, long_description_content_type="text/markdown", author="Joshy Cyriac", author_email="[email protected]", url="https://github.com/joshy/striprtf", download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"], keywords=["rtf"], scripts=["striprtf/striprtf"], classifiers=[], )
import io from os import path from setuptools import setup this_directory = path.abspath(path.dirname(__file__)) with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() about = {} with io.open("striprtf/_version.py", "r", encoding="utf-8") as f: exec(f.read(), about) setup( name="striprtf", packages=["striprtf"], version=about["__version__"], description="A simple library to convert rtf to text", long_description=long_description, long_description_content_type="text/markdown", author="Joshy Cyriac", author_email="[email protected]", url="https://github.com/joshy/striprtf", download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"], keywords=["rtf"], scripts=["striprtf/striprtf"], classifiers=[ "License :: OSI Approved :: BSD License" ], )
Add License classifier for pypa
Add License classifier for pypa
Python
bsd-3-clause
joshy/striprtf
python
## Code Before: import io from os import path from setuptools import setup this_directory = path.abspath(path.dirname(__file__)) with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() about = {} with io.open("striprtf/_version.py", "r", encoding="utf-8") as f: exec(f.read(), about) setup( name="striprtf", packages=["striprtf"], version=about["__version__"], description="A simple library to convert rtf to text", long_description=long_description, long_description_content_type="text/markdown", author="Joshy Cyriac", author_email="[email protected]", url="https://github.com/joshy/striprtf", download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"], keywords=["rtf"], scripts=["striprtf/striprtf"], classifiers=[], ) ## Instruction: Add License classifier for pypa ## Code After: import io from os import path from setuptools import setup this_directory = path.abspath(path.dirname(__file__)) with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() about = {} with io.open("striprtf/_version.py", "r", encoding="utf-8") as f: exec(f.read(), about) setup( name="striprtf", packages=["striprtf"], version=about["__version__"], description="A simple library to convert rtf to text", long_description=long_description, long_description_content_type="text/markdown", author="Joshy Cyriac", author_email="[email protected]", url="https://github.com/joshy/striprtf", download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"], keywords=["rtf"], scripts=["striprtf/striprtf"], classifiers=[ "License :: OSI Approved :: BSD License" ], )
... % about["__version__"], keywords=["rtf"], scripts=["striprtf/striprtf"], classifiers=[ "License :: OSI Approved :: BSD License" ], ) ...
096a8972d610379356d668d57d92010707fcf9e1
blockbuster/__init__.py
blockbuster/__init__.py
__author__ = 'matt' from flask import Flask app = Flask(__name__) def startup(): import blockbuster.bb_dbconnector_factory import blockbuster.bb_logging as log import blockbuster.bb_auditlogger as audit try: if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check(): import blockbuster.bb_routes print("Running...") else: raise RuntimeError("Incorrect database schema version.") except RuntimeError, e: log.logger.exception("Incorrect database schema version.") audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', 'Incorrect database schema version.') startup()
__author__ = 'matt' __version__ = '1.24.02' target_schema_version = '1.24.00' from flask import Flask app = Flask(__name__) def startup(): import blockbuster.bb_dbconnector_factory import blockbuster.bb_logging as log import blockbuster.bb_auditlogger as audit try: if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check(): import blockbuster.bb_routes print("Running...") else: raise RuntimeError("Incorrect database schema version.") except RuntimeError, e: log.logger.exception("Incorrect database schema version.") audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', 'Incorrect database schema version.') startup()
Move version numbers back to the package init file
Move version numbers back to the package init file
Python
mit
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
python
## Code Before: __author__ = 'matt' from flask import Flask app = Flask(__name__) def startup(): import blockbuster.bb_dbconnector_factory import blockbuster.bb_logging as log import blockbuster.bb_auditlogger as audit try: if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check(): import blockbuster.bb_routes print("Running...") else: raise RuntimeError("Incorrect database schema version.") except RuntimeError, e: log.logger.exception("Incorrect database schema version.") audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', 'Incorrect database schema version.') startup() ## Instruction: Move version numbers back to the package init file ## Code After: __author__ = 'matt' __version__ = '1.24.02' target_schema_version = '1.24.00' from flask import Flask app = Flask(__name__) def startup(): import blockbuster.bb_dbconnector_factory import blockbuster.bb_logging as log import blockbuster.bb_auditlogger as audit try: if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check(): import blockbuster.bb_routes print("Running...") else: raise RuntimeError("Incorrect database schema version.") except RuntimeError, e: log.logger.exception("Incorrect database schema version.") audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', 'Incorrect database schema version.') startup()
// ... existing code ... __author__ = 'matt' __version__ = '1.24.02' target_schema_version = '1.24.00' from flask import Flask app = Flask(__name__) // ... rest of the code ...
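Keeping `__version__` and the target schema version at the top of the package gives one canonical, importable place for them. The record does not show how the rest of the project consumes these constants; a common companion pattern, offered here only as an assumption about how it might be used, is for `setup.py` to read the string back out without importing the package (which would drag in Flask at build time):

```python
import io
import re

from setuptools import setup


def read_version(path='blockbuster/__init__.py'):
    """Extract __version__ from the package file without importing it."""
    with io.open(path, encoding='utf-8') as handle:
        match = re.search(r"__version__\s*=\s*'([^']+)'", handle.read())
    if match is None:
        raise RuntimeError('No __version__ string found in %s' % path)
    return match.group(1)


setup(name='blockbuster', version=read_version())
```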
0e7be2adf1101ae842dddb3db3217957a8e5957f
iati/core/rulesets.py
iati/core/rulesets.py
"""A module containg a core representation of IATI Rulesets.""" class Ruleset(object): """Representation of a Ruleset as defined within the IATI SSOT.""" pass class Rule(object): """Representation of a Rule contained within a Ruleset. Acts as a base class for specific types of Rule that actually do something. """ pass class NoMoreThanOne(Rule): """Representation of a Rule that checks that there is no more than one Element matching a given XPath.""" pass
class Ruleset(object): """Representation of a Ruleset as defined within the IATI SSOT.""" pass class Rule(object): """Representation of a Rule contained within a Ruleset. Acts as a base class for specific types of Rule that actually do something. """ pass class NoMoreThanOne(Rule): """Representation of a Rule that checks that there is no more than one Element matching a given XPath.""" pass
Add a ruleset module TODO
Add a ruleset module TODO
Python
mit
IATI/iati.core,IATI/iati.core
python
## Code Before: """A module containg a core representation of IATI Rulesets.""" class Ruleset(object): """Representation of a Ruleset as defined within the IATI SSOT.""" pass class Rule(object): """Representation of a Rule contained within a Ruleset. Acts as a base class for specific types of Rule that actually do something. """ pass class NoMoreThanOne(Rule): """Representation of a Rule that checks that there is no more than one Element matching a given XPath.""" pass ## Instruction: Add a ruleset module TODO ## Code After: class Ruleset(object): """Representation of a Ruleset as defined within the IATI SSOT.""" pass class Rule(object): """Representation of a Rule contained within a Ruleset. Acts as a base class for specific types of Rule that actually do something. """ pass class NoMoreThanOne(Rule): """Representation of a Rule that checks that there is no more than one Element matching a given XPath.""" pass
// ... existing code ... class Ruleset(object): // ... rest of the code ...
6754475809f825b4ee9025523dd385fd8a8ecd40
src/Prova.java
src/Prova.java
public class Prova { public static void main(String[] args) { System.out.println(42); } }
public class Prova { public static void main(String[] args) { System.out.println(42); System.out.println("Prova di modifica"); } }
Test to verify the update
Test to verify the update
Java
mit
mastershadow/ca-esercizi-java
java
## Code Before: public class Prova { public static void main(String[] args) { System.out.println(42); } } ## Instruction: Test di verifica di aggiornamento ## Code After: public class Prova { public static void main(String[] args) { System.out.println(42); System.out.println("Prova di modifica"); } }
// ... existing code ... public static void main(String[] args) { System.out.println(42); System.out.println("Prova di modifica"); } } // ... rest of the code ...
0d1b8597a75f7e24ce3e74f99aad359e27a32be5
fixcity/bmabr/tests/test_templatetags.py
fixcity/bmabr/tests/test_templatetags.py
import unittest class TestRecaptchaTags(unittest.TestCase): def test_recaptcha_html(self): from fixcity.bmabr.templatetags import recaptcha_tags from django.conf import settings html = recaptcha_tags.recaptcha_html() self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html) self.failUnless(html.startswith('<script')) class TestGoogleTags(unittest.TestCase): def test_google_analytics(self): from fixcity.bmabr.templatetags import google_analytics from django.conf import settings html = google_analytics.google_analytics() self.failUnless(settings.GOOGLE_ANALYTICS_KEY in html) self.failUnless(html.startswith('<script'))
import unittest import mock import django.conf class TestRecaptchaTags(unittest.TestCase): def test_recaptcha_html(self): from fixcity.bmabr.templatetags import recaptcha_tags from django.conf import settings html = recaptcha_tags.recaptcha_html() self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html) self.failUnless(html.startswith('<script')) class TestGoogleTags(unittest.TestCase): @mock.patch_object(django.conf, 'settings') def test_google_analytics(self, mock_settings): from fixcity.bmabr.templatetags import google_analytics mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq' html = google_analytics.google_analytics() self.failUnless('xyzpdq' in html) self.failUnless(html.startswith('<script')) # For some reason this doesn't work if I put it in a separate # test case... the google_analytics() function keeps a # reference to the OLD mock_settings instance with the # 'xyzpdq' value! mock_settings.GOOGLE_ANALYTICS_KEY = '' html = google_analytics.google_analytics() self.assertEqual(html, '')
Fix tests to work when GOOGLE_ANALYTICS_KEY is not set.
Fix tests to work when GOOGLE_ANALYTICS_KEY is not set.
Python
agpl-3.0
openplans/fixcity,openplans/fixcity
python
## Code Before: import unittest class TestRecaptchaTags(unittest.TestCase): def test_recaptcha_html(self): from fixcity.bmabr.templatetags import recaptcha_tags from django.conf import settings html = recaptcha_tags.recaptcha_html() self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html) self.failUnless(html.startswith('<script')) class TestGoogleTags(unittest.TestCase): def test_google_analytics(self): from fixcity.bmabr.templatetags import google_analytics from django.conf import settings html = google_analytics.google_analytics() self.failUnless(settings.GOOGLE_ANALYTICS_KEY in html) self.failUnless(html.startswith('<script')) ## Instruction: Fix tests to work when GOOGLE_ANALYTICS_KEY is not set. ## Code After: import unittest import mock import django.conf class TestRecaptchaTags(unittest.TestCase): def test_recaptcha_html(self): from fixcity.bmabr.templatetags import recaptcha_tags from django.conf import settings html = recaptcha_tags.recaptcha_html() self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html) self.failUnless(html.startswith('<script')) class TestGoogleTags(unittest.TestCase): @mock.patch_object(django.conf, 'settings') def test_google_analytics(self, mock_settings): from fixcity.bmabr.templatetags import google_analytics mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq' html = google_analytics.google_analytics() self.failUnless('xyzpdq' in html) self.failUnless(html.startswith('<script')) # For some reason this doesn't work if I put it in a separate # test case... the google_analytics() function keeps a # reference to the OLD mock_settings instance with the # 'xyzpdq' value! mock_settings.GOOGLE_ANALYTICS_KEY = '' html = google_analytics.google_analytics() self.assertEqual(html, '')
# ... existing code ... import unittest import mock import django.conf class TestRecaptchaTags(unittest.TestCase): # ... modified code ... class TestGoogleTags(unittest.TestCase): @mock.patch_object(django.conf, 'settings') def test_google_analytics(self, mock_settings): from fixcity.bmabr.templatetags import google_analytics mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq' html = google_analytics.google_analytics() self.failUnless('xyzpdq' in html) self.failUnless(html.startswith('<script')) # For some reason this doesn't work if I put it in a separate # test case... the google_analytics() function keeps a # reference to the OLD mock_settings instance with the # 'xyzpdq' value! mock_settings.GOOGLE_ANALYTICS_KEY = '' html = google_analytics.google_analytics() self.assertEqual(html, '') # ... rest of the code ...
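The fix in this record works by patching `django.conf.settings` so the template tag can be driven both with and without an analytics key; the in-test comment notes that the tag ends up holding a reference to whichever settings object it first saw, which is why both cases share one test method. A compact sketch of the same pattern using the current `mock.patch.object` spelling (the record itself uses the older `mock.patch_object` alias):

```python
import unittest

import mock          # the standalone library on Python 2; unittest.mock on 3
import django.conf


class GoogleAnalyticsTagTest(unittest.TestCase):

    @mock.patch.object(django.conf, 'settings')
    def test_key_present_then_absent(self, mock_settings):
        from fixcity.bmabr.templatetags import google_analytics

        mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq'
        self.assertIn('xyzpdq', google_analytics.google_analytics())

        # Reuse the same mocked settings rather than a second test case,
        # matching the workaround described in the record.
        mock_settings.GOOGLE_ANALYTICS_KEY = ''
        self.assertEqual(google_analytics.google_analytics(), '')
```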
06be7bebcc72d2ae77a9004b2a5cc0043df0e9a6
setup.py
setup.py
from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='[email protected]', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='[email protected]', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
Use subprocess32 under Python 2 for subprocess fixes.
Use subprocess32 under Python 2 for subprocess fixes.
Python
mit
regebro/spiny
python
## Code Before: from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='[email protected]', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' ) ## Instruction: Use subprocess32 under Python 2 for subprocess fixes. ## Code After: from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='[email protected]', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
// ... existing code ... from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() // ... modified code ... author_email='[email protected]', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', // ... rest of the code ...
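Branching on `sys.version_info` inside `setup.py`, as this record does, works, but it fixes the dependency list at build time on whichever interpreter runs the build. On reasonably recent setuptools and pip the same intent can be expressed declaratively with a PEP 508 environment marker, so a single sdist or wheel serves both interpreter lines. A hedged alternative sketch, not what the spiny repo actually does:

```python
from setuptools import setup

setup(
    name='spiny',
    # ...same metadata as in the record...
    install_requires=[
        # Resolved at install time: only pulled in on Python 2 interpreters.
        'subprocess32; python_version < "3"',
    ],
)
```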
c242ad95221c9c5b2f76795abd7dcbad5145cb2a
datagrid_gtk3/tests/utils/test_transformations.py
datagrid_gtk3/tests/utils/test_transformations.py
"""Data transformation utilities test cases.""" import unittest from datagrid_gtk3.utils.transformations import degree_decimal_str_transform class DegreeDecimalStrTransformTest(unittest.TestCase): """Degree decimal string transformation test case.""" def test_no_basestring(self): """AssertionError raised when no basestring value is passed.""" self.assertRaises(AssertionError, degree_decimal_str_transform, 0) self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23) self.assertRaises(AssertionError, degree_decimal_str_transform, True) def test_no_digit(self): """AssertionError raised when other characters than digits.""" self.assertRaises(AssertionError, degree_decimal_str_transform, '.') self.assertRaises(AssertionError, degree_decimal_str_transform, '+') self.assertRaises(AssertionError, degree_decimal_str_transform, '-') def test_length(self): """AssertionError when more characters than expected passed.""" self.assertRaises( AssertionError, degree_decimal_str_transform, '123456789') def test_point_insertion(self): """Decimal point is inserted in the expected location.""" self.assertEqual( degree_decimal_str_transform('12345678'), '12.345678', ) self.assertEqual( degree_decimal_str_transform('123456'), '0.123456', )
"""Data transformation utilities test cases.""" import unittest from datagrid_gtk3.utils.transformations import degree_decimal_str_transform class DegreeDecimalStrTransformTest(unittest.TestCase): """Degree decimal string transformation test case.""" def test_no_basestring(self): """AssertionError raised when no basestring value is passed.""" self.assertRaises(AssertionError, degree_decimal_str_transform, 0) self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23) self.assertRaises(AssertionError, degree_decimal_str_transform, True) def test_no_digit(self): """AssertionError raised when other characters than digits.""" self.assertRaises(AssertionError, degree_decimal_str_transform, '.') self.assertRaises(AssertionError, degree_decimal_str_transform, '+') self.assertRaises(AssertionError, degree_decimal_str_transform, '-') def test_length(self): """AssertionError when more characters than expected passed.""" self.assertRaises( AssertionError, degree_decimal_str_transform, '123456789') def test_point_insertion(self): """Decimal point is inserted in the expected location.""" self.assertEqual( degree_decimal_str_transform('12345678'), '12.345678', ) self.assertEqual( degree_decimal_str_transform('1234567'), '1.234567', ) self.assertEqual( degree_decimal_str_transform('123456'), '0.123456', ) self.assertEqual( degree_decimal_str_transform('12345'), '0.012345', )
Add more test cases to verify transformer behavior
Add more test cases to verify transformer behavior
Python
mit
nowsecure/datagrid-gtk3,jcollado/datagrid-gtk3
python
## Code Before: """Data transformation utilities test cases.""" import unittest from datagrid_gtk3.utils.transformations import degree_decimal_str_transform class DegreeDecimalStrTransformTest(unittest.TestCase): """Degree decimal string transformation test case.""" def test_no_basestring(self): """AssertionError raised when no basestring value is passed.""" self.assertRaises(AssertionError, degree_decimal_str_transform, 0) self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23) self.assertRaises(AssertionError, degree_decimal_str_transform, True) def test_no_digit(self): """AssertionError raised when other characters than digits.""" self.assertRaises(AssertionError, degree_decimal_str_transform, '.') self.assertRaises(AssertionError, degree_decimal_str_transform, '+') self.assertRaises(AssertionError, degree_decimal_str_transform, '-') def test_length(self): """AssertionError when more characters than expected passed.""" self.assertRaises( AssertionError, degree_decimal_str_transform, '123456789') def test_point_insertion(self): """Decimal point is inserted in the expected location.""" self.assertEqual( degree_decimal_str_transform('12345678'), '12.345678', ) self.assertEqual( degree_decimal_str_transform('123456'), '0.123456', ) ## Instruction: Add more test cases to verify transformer behavior ## Code After: """Data transformation utilities test cases.""" import unittest from datagrid_gtk3.utils.transformations import degree_decimal_str_transform class DegreeDecimalStrTransformTest(unittest.TestCase): """Degree decimal string transformation test case.""" def test_no_basestring(self): """AssertionError raised when no basestring value is passed.""" self.assertRaises(AssertionError, degree_decimal_str_transform, 0) self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23) self.assertRaises(AssertionError, degree_decimal_str_transform, True) def test_no_digit(self): """AssertionError raised when other characters than digits.""" self.assertRaises(AssertionError, degree_decimal_str_transform, '.') self.assertRaises(AssertionError, degree_decimal_str_transform, '+') self.assertRaises(AssertionError, degree_decimal_str_transform, '-') def test_length(self): """AssertionError when more characters than expected passed.""" self.assertRaises( AssertionError, degree_decimal_str_transform, '123456789') def test_point_insertion(self): """Decimal point is inserted in the expected location.""" self.assertEqual( degree_decimal_str_transform('12345678'), '12.345678', ) self.assertEqual( degree_decimal_str_transform('1234567'), '1.234567', ) self.assertEqual( degree_decimal_str_transform('123456'), '0.123456', ) self.assertEqual( degree_decimal_str_transform('12345'), '0.012345', )
... '12.345678', ) self.assertEqual( degree_decimal_str_transform('1234567'), '1.234567', ) self.assertEqual( degree_decimal_str_transform('123456'), '0.123456', ) self.assertEqual( degree_decimal_str_transform('12345'), '0.012345', ) ...
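The added assertions pin down the transform's contract: the input must be a digit string of at most eight characters, the decimal point lands six digits from the right, and shorter inputs gain leading zeros ('12345' becomes '0.012345'). The implementation itself is not shown in the record; the sketch below is only an illustration of that contract and may differ from what datagrid-gtk3 actually does:

```python
def degree_decimal_str_transform(value):
    """Insert a decimal point six digits from the right of a digit string."""
    assert isinstance(value, str)   # basestring on Python 2
    assert value.isdigit()
    assert len(value) <= 8
    # Left-pad so there is always at least one integer digit, then split
    # into integer and fractional parts.
    padded = value.rjust(7, '0')
    return '%s.%s' % (int(padded[:-6]), padded[-6:])
```

Checking it by hand against the new assertions: '1234567' pads to itself and splits as '1' plus '234567', matching the expected '1.234567', while '12345' pads to '0012345' and yields '0.012345'.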
873572d6731d35647dca90b4aea4d7f26866f676
setup.py
setup.py
from setuptools import setup version = '0.4.2' setup( name='sqlalchemy-vertica-python', version=version, description='Vertica dialect for sqlalchemy using vertica_python', long_description=open("README.rst").read(), license="MIT", url='https://github.com/LocusEnergy/sqlalchemy-vertica-python', download_url = 'https://github.com/LocusEnergy/sqlalchemy-vertica-python/tarball/{}'.format(version), author='Locus Energy', author_email='[email protected]', packages=[ 'sqla_vertica_python', ], entry_points=""" [sqlalchemy.dialects] vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect """, install_requires=[ 'vertica_python' ], )
from setuptools import setup version = '0.4.2' setup( name='sqlalchemy-vertica-python', version=version, description='Vertica dialect for sqlalchemy using vertica_python', long_description=open("README.rst").read(), license="MIT", url='https://github.com/LocusEnergy/sqlalchemy-vertica-python', download_url = 'https://github.com/LocusEnergy/sqlalchemy-vertica-python/tarball/{}'.format(version), author='Locus Energy', author_email='[email protected]', packages=[ 'sqla_vertica_python', ], entry_points=""" [sqlalchemy.dialects] vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect """, install_requires=[ 'vertica_python[namedparams]' ], )
Add optional named parameters dependencies for vertica-python
Add optional named parameters dependencies for vertica-python This will allow to play nicely with sqlalchemy-vertica-python and safrs Ref: https://github.com/vertica/vertica-python/pull/247
Python
mit
LocusEnergy/sqlalchemy-vertica-python
python
## Code Before: from setuptools import setup version = '0.4.2' setup( name='sqlalchemy-vertica-python', version=version, description='Vertica dialect for sqlalchemy using vertica_python', long_description=open("README.rst").read(), license="MIT", url='https://github.com/LocusEnergy/sqlalchemy-vertica-python', download_url = 'https://github.com/LocusEnergy/sqlalchemy-vertica-python/tarball/{}'.format(version), author='Locus Energy', author_email='[email protected]', packages=[ 'sqla_vertica_python', ], entry_points=""" [sqlalchemy.dialects] vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect """, install_requires=[ 'vertica_python' ], ) ## Instruction: Add optional named parameters dependencies for vertica-python This will allow to play nicely with sqlalchemy-vertica-python and safrs Ref: https://github.com/vertica/vertica-python/pull/247 ## Code After: from setuptools import setup version = '0.4.2' setup( name='sqlalchemy-vertica-python', version=version, description='Vertica dialect for sqlalchemy using vertica_python', long_description=open("README.rst").read(), license="MIT", url='https://github.com/LocusEnergy/sqlalchemy-vertica-python', download_url = 'https://github.com/LocusEnergy/sqlalchemy-vertica-python/tarball/{}'.format(version), author='Locus Energy', author_email='[email protected]', packages=[ 'sqla_vertica_python', ], entry_points=""" [sqlalchemy.dialects] vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect """, install_requires=[ 'vertica_python[namedparams]' ], )
... vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect """, install_requires=[ 'vertica_python[namedparams]' ], ) ...
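The bracket syntax `vertica_python[namedparams]` asks pip to install the dependency together with one of its optional feature sets, which the dependency declares through `extras_require`. A minimal sketch of the declaring side; the package name and the extra's contents here are illustrative, not taken from vertica-python itself:

```python
from setuptools import setup

setup(
    name='exampledriver',
    version='1.0',
    py_modules=['exampledriver'],
    extras_require={
        # `pip install exampledriver[namedparams]` adds these packages on
        # top of the base install_requires.
        'namedparams': ['sqlparams'],
    },
)
```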
027fa84469e17ec4b8948de095388ec94ea40941
api/identifiers/serializers.py
api/identifiers/serializers.py
from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) identifier = LinksField({ 'self': 'get_identifiers' }) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) class Meta: type_ = 'identifiers' def get_identifiers(self, obj): return obj.value
from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) identifier = LinksField({ 'self': 'get_identifiers' }) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) class Meta: type_ = 'identifiers' def get_identifiers(self, obj): return obj.value def get_absolute_url(self, obj): return obj.absolute_api_v2_url
Add get_absolute_url method to serializer
Add get_absolute_url method to serializer
Python
apache-2.0
abought/osf.io,mfraezz/osf.io,leb2dg/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,saradbowman/osf.io,cwisecarver/osf.io,aaxelb/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,brianjgeiger/osf.io,wearpants/osf.io,samchrisinger/osf.io,chennan47/osf.io,caneruguz/osf.io,jnayak1/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,emetsger/osf.io,kwierman/osf.io,DanielSBrown/osf.io,abought/osf.io,acshi/osf.io,brianjgeiger/osf.io,adlius/osf.io,kwierman/osf.io,monikagrabowska/osf.io,acshi/osf.io,erinspace/osf.io,TomBaxter/osf.io,zamattiac/osf.io,aaxelb/osf.io,caseyrollins/osf.io,caneruguz/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,amyshi188/osf.io,mattclark/osf.io,binoculars/osf.io,Nesiehr/osf.io,chrisseto/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,laurenrevere/osf.io,saradbowman/osf.io,cwisecarver/osf.io,acshi/osf.io,sloria/osf.io,samchrisinger/osf.io,mfraezz/osf.io,TomBaxter/osf.io,binoculars/osf.io,mluo613/osf.io,chennan47/osf.io,binoculars/osf.io,icereval/osf.io,emetsger/osf.io,SSJohns/osf.io,amyshi188/osf.io,alexschiller/osf.io,Nesiehr/osf.io,zamattiac/osf.io,adlius/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,crcresearch/osf.io,chrisseto/osf.io,hmoco/osf.io,hmoco/osf.io,wearpants/osf.io,alexschiller/osf.io,baylee-d/osf.io,caseyrollins/osf.io,leb2dg/osf.io,Nesiehr/osf.io,kwierman/osf.io,hmoco/osf.io,aaxelb/osf.io,mfraezz/osf.io,SSJohns/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,mattclark/osf.io,DanielSBrown/osf.io,hmoco/osf.io,adlius/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,zamattiac/osf.io,acshi/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,emetsger/osf.io,baylee-d/osf.io,leb2dg/osf.io,baylee-d/osf.io,aaxelb/osf.io,crcresearch/osf.io,emetsger/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,mattclark/osf.io,jnayak1/osf.io,cslzchen/osf.io,rdhyee/osf.io,alexschiller/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,mluke93/osf.io,amyshi188/osf.io,TomBaxter/osf.io,adlius/osf.io,rdhyee/osf.io,samchrisinger/osf.io,chennan47/osf.io,samchrisinger/osf.io,sloria/osf.io,leb2dg/osf.io,mluo613/osf.io,chrisseto/osf.io,abought/osf.io,cslzchen/osf.io,rdhyee/osf.io,amyshi188/osf.io,sloria/osf.io,acshi/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,mluke93/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,caneruguz/osf.io,pattisdr/osf.io,cslzchen/osf.io,jnayak1/osf.io,felliott/osf.io,mluo613/osf.io,icereval/osf.io,felliott/osf.io,abought/osf.io,pattisdr/osf.io,mluo613/osf.io,caneruguz/osf.io,jnayak1/osf.io,alexschiller/osf.io,laurenrevere/osf.io,erinspace/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,wearpants/osf.io,mluke93/osf.io,Johnetordoff/osf.io,icereval/osf.io
python
## Code Before: from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) identifier = LinksField({ 'self': 'get_identifiers' }) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) class Meta: type_ = 'identifiers' def get_identifiers(self, obj): return obj.value ## Instruction: Add get_absolute_url method to serializer ## Code After: from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) identifier = LinksField({ 'self': 'get_identifiers' }) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) class Meta: type_ = 'identifiers' def get_identifiers(self, obj): return obj.value def get_absolute_url(self, obj): return obj.absolute_api_v2_url
# ... existing code ... def get_identifiers(self, obj): return obj.value def get_absolute_url(self, obj): return obj.absolute_api_v2_url # ... rest of the code ...
4fe675af1cc8eb65f843e06962763dab8c920ce5
contrib/meson/GetLz4LibraryVersion.py
contrib/meson/GetLz4LibraryVersion.py
import re import sys def usage(): print('usage: python3 GetLz4LibraryVersion.py <path/to/lz4.h>') sys.exit(1) def find_version(filepath): version_file_data = None with open(filepath) as fd: version_file_data = fd.read() patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$ """ regex = re.compile(patterns, re.MULTILINE) version_match = regex.search(version_file_data) if version_match: return version_match.groups() raise Exception("Unable to find version string.") def main(): if len(sys.argv) < 2: usage() filepath = sys.argv[1] version_tup = find_version(filepath) print('.'.join(version_tup)) if __name__ == '__main__': main()
import re import sys def find_version(filepath): version_file_data = None with open(filepath) as fd: version_file_data = fd.read() patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$ """ regex = re.compile(patterns, re.MULTILINE) version_match = regex.search(version_file_data) if version_match: return version_match.groups() raise Exception("Unable to find version string.") def main(): import argparse parser = argparse.ArgumentParser(description='Print lz4 version from lib/lz4.h') parser.add_argument('file', help='path to lib/lz4.h') args = parser.parse_args() filepath = args.file version_tup = find_version(filepath) print('.'.join(version_tup)) if __name__ == '__main__': main()
Use argparse instead of manually parsing
Use argparse instead of manually parsing [skip ci]
Python
isc
unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso
python
## Code Before: import re import sys def usage(): print('usage: python3 GetLz4LibraryVersion.py <path/to/lz4.h>') sys.exit(1) def find_version(filepath): version_file_data = None with open(filepath) as fd: version_file_data = fd.read() patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$ """ regex = re.compile(patterns, re.MULTILINE) version_match = regex.search(version_file_data) if version_match: return version_match.groups() raise Exception("Unable to find version string.") def main(): if len(sys.argv) < 2: usage() filepath = sys.argv[1] version_tup = find_version(filepath) print('.'.join(version_tup)) if __name__ == '__main__': main() ## Instruction: Use argparse instead of manually parsing [skip ci] ## Code After: import re import sys def find_version(filepath): version_file_data = None with open(filepath) as fd: version_file_data = fd.read() patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$ #\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$ """ regex = re.compile(patterns, re.MULTILINE) version_match = regex.search(version_file_data) if version_match: return version_match.groups() raise Exception("Unable to find version string.") def main(): import argparse parser = argparse.ArgumentParser(description='Print lz4 version from lib/lz4.h') parser.add_argument('file', help='path to lib/lz4.h') args = parser.parse_args() filepath = args.file version_tup = find_version(filepath) print('.'.join(version_tup)) if __name__ == '__main__': main()
// ... existing code ... import re import sys def find_version(filepath): // ... modified code ... def main(): import argparse parser = argparse.ArgumentParser(description='Print lz4 version from lib/lz4.h') parser.add_argument('file', help='path to lib/lz4.h') args = parser.parse_args() filepath = args.file version_tup = find_version(filepath) print('.'.join(version_tup)) // ... rest of the code ...
cefc69e635bdcabbf8191627913af238b9f974f2
UIKit/UIPanGestureRecognizer+Private.h
UIKit/UIPanGestureRecognizer+Private.h
@interface UIPanGestureRecognizer (Private) @property (setter=_setHysteresis:) BOOL _hysteresis; @end
@interface UIPanGestureRecognizer (Private) @property (setter=_setHysteresis:) CGFloat _hysteresis; @property (assign, nonatomic) BOOL failsPastMaxTouches; @end
Fix UIPanGestureRecognizer property type; add property
[UIKit] Fix UIPanGestureRecognizer property type; add property
C
unlicense
hbang/headers,hbang/headers
c
## Code Before: @interface UIPanGestureRecognizer (Private) @property (setter=_setHysteresis:) BOOL _hysteresis; @end ## Instruction: [UIKit] Fix UIPanGestureRecognizer property type; add property ## Code After: @interface UIPanGestureRecognizer (Private) @property (setter=_setHysteresis:) CGFloat _hysteresis; @property (assign, nonatomic) BOOL failsPastMaxTouches; @end
// ... existing code ... @interface UIPanGestureRecognizer (Private) @property (setter=_setHysteresis:) CGFloat _hysteresis; @property (assign, nonatomic) BOOL failsPastMaxTouches; @end // ... rest of the code ...
5fc0854f54f2946c2b38a8b3c03a553c8a838aed
shale/webdriver.py
shale/webdriver.py
from selenium import webdriver from selenium.webdriver.remote.switch_to import SwitchTo from selenium.webdriver.remote.mobile import Mobile from selenium.webdriver.remote.errorhandler import ErrorHandler from selenium.webdriver.remote.remote_connection import RemoteConnection class ResumableRemote(webdriver.Remote): def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub', session_id=None, **kwargs): #desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False): if session_id is not None: self.command_executor = command_executor if type(self.command_executor) is bytes or isinstance(self.command_executor, str): self.command_executor = RemoteConnection( command_executor, keep_alive=kwargs.get('keep_alive', False)) self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId') self._is_remote = True self.start_client() self.resume_session(session_id) self._switch_to = SwitchTo(self) self._mobile = Mobile(self) self.error_handler = ErrorHandler() else: super(ResumableRemote, self).__init__( command_executor=command_executor, **kwargs) def resume_session(self, session_id): self.session_id = session_id response = self.command_executor.execute('get_session', {'sessionId': session_id}) self.capabilities = response['value']
from selenium import webdriver from selenium.webdriver.remote.switch_to import SwitchTo from selenium.webdriver.remote.mobile import Mobile from selenium.webdriver.remote.errorhandler import ErrorHandler from selenium.webdriver.remote.remote_connection import RemoteConnection class ResumableRemote(webdriver.Remote): def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub', session_id=None, **kwargs): if session_id is not None: self.command_executor = command_executor try: string_type = basestring except: string_type = str if isinstance(self.command_executor, (string_type, bytes)): self.command_executor = RemoteConnection( command_executor, keep_alive=kwargs.get('keep_alive', False)) self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId') self._is_remote = True self.start_client() self.resume_session(session_id) self._switch_to = SwitchTo(self) self._mobile = Mobile(self) self.error_handler = ErrorHandler() else: super(ResumableRemote, self).__init__( command_executor=command_executor, **kwargs) def resume_session(self, session_id): self.session_id = session_id response = self.command_executor.execute('get_session', {'sessionId': session_id}) self.capabilities = response['value']
Fix a string type-checking bug.
Fix a string type-checking bug.
Python
mit
cardforcoin/shale,mhluongo/shale,mhluongo/shale,cardforcoin/shale
python
## Code Before: from selenium import webdriver from selenium.webdriver.remote.switch_to import SwitchTo from selenium.webdriver.remote.mobile import Mobile from selenium.webdriver.remote.errorhandler import ErrorHandler from selenium.webdriver.remote.remote_connection import RemoteConnection class ResumableRemote(webdriver.Remote): def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub', session_id=None, **kwargs): #desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False): if session_id is not None: self.command_executor = command_executor if type(self.command_executor) is bytes or isinstance(self.command_executor, str): self.command_executor = RemoteConnection( command_executor, keep_alive=kwargs.get('keep_alive', False)) self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId') self._is_remote = True self.start_client() self.resume_session(session_id) self._switch_to = SwitchTo(self) self._mobile = Mobile(self) self.error_handler = ErrorHandler() else: super(ResumableRemote, self).__init__( command_executor=command_executor, **kwargs) def resume_session(self, session_id): self.session_id = session_id response = self.command_executor.execute('get_session', {'sessionId': session_id}) self.capabilities = response['value'] ## Instruction: Fix a string type-checking bug. ## Code After: from selenium import webdriver from selenium.webdriver.remote.switch_to import SwitchTo from selenium.webdriver.remote.mobile import Mobile from selenium.webdriver.remote.errorhandler import ErrorHandler from selenium.webdriver.remote.remote_connection import RemoteConnection class ResumableRemote(webdriver.Remote): def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub', session_id=None, **kwargs): if session_id is not None: self.command_executor = command_executor try: string_type = basestring except: string_type = str if isinstance(self.command_executor, (string_type, bytes)): self.command_executor = RemoteConnection( command_executor, keep_alive=kwargs.get('keep_alive', False)) self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId') self._is_remote = True self.start_client() self.resume_session(session_id) self._switch_to = SwitchTo(self) self._mobile = Mobile(self) self.error_handler = ErrorHandler() else: super(ResumableRemote, self).__init__( command_executor=command_executor, **kwargs) def resume_session(self, session_id): self.session_id = session_id response = self.command_executor.execute('get_session', {'sessionId': session_id}) self.capabilities = response['value']
... class ResumableRemote(webdriver.Remote): def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub', session_id=None, **kwargs): if session_id is not None: self.command_executor = command_executor try: string_type = basestring except: string_type = str if isinstance(self.command_executor, (string_type, bytes)): self.command_executor = RemoteConnection( command_executor, keep_alive=kwargs.get('keep_alive', False)) self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId') ...
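The fix in this last record replaces an exact `str`/`bytes` check with one that also accepts `unicode` command-executor URLs on Python 2, probing for `basestring` in a try/except. The same compatibility idiom, pulled out into a tiny helper for clarity; the helper name and the narrower `except NameError` are my choices, not the repo's:

```python
def is_stringish(value):
    """True for text or byte strings on both Python 2 and Python 3."""
    try:
        string_types = (basestring, bytes)   # Python 2: covers str and unicode
    except NameError:
        string_types = (str, bytes)          # Python 3
    return isinstance(value, string_types)


assert is_stringish('http://127.0.0.1:4444/wd/hub')
assert is_stringish(u'http://127.0.0.1:4444/wd/hub')
assert is_stringish(b'http://127.0.0.1:4444/wd/hub')
assert not is_stringish(1234)
```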