Dataset columns (name, type, value range):

  commit         stringlengths   40 - 40
  old_file       stringlengths   4 - 234
  new_file       stringlengths   4 - 234
  old_contents   stringlengths   10 - 3.01k
  new_contents   stringlengths   19 - 3.38k
  subject        stringlengths   16 - 736
  message        stringlengths   17 - 2.63k
  lang           stringclasses   4 values
  license        stringclasses   13 values
  repos          stringlengths   5 - 82.6k
  config         stringclasses   4 values
  content        stringlengths   134 - 4.41k
  fuzzy_diff     stringlengths   29 - 3.44k
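As a rough illustration only (not part of the original dump), the sketch below shows how rows with the column shape described above could be loaded and inspected with the Hugging Face `datasets` library. The dataset identifier is a placeholder assumption; substitute the actual Hub path or local file backing this dump.

```python
# Minimal sketch, assuming the dump is available as a Hugging Face dataset.
# "your-org/commit-edit-dataset" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("your-org/commit-edit-dataset", split="train")

row = ds[0]
print(row["commit"])     # 40-character commit hash
print(row["old_file"])   # path of the file before the change
print(row["subject"])    # first line of the commit message
print(row["lang"])       # one of 4 language classes (the rows below show Java, Python, C, Kotlin)

# The "content" field bundles the edit as text in the form:
#   ## Code Before: ... ## Instruction: ... ## Code After: ...
edit_record = row["content"]
```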
4d65fbf8aaead687d5a2a61f0ca4e30846d77c3a
rest-api/core/src/main/java/org/eclipse/kapua/app/api/core/exception/KapuaExceptionMapper.java
rest-api/core/src/main/java/org/eclipse/kapua/app/api/core/exception/KapuaExceptionMapper.java
/******************************************************************************* * Copyright (c) 2018 Eurotech and/or its affiliates and others * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech - initial API and implementation *******************************************************************************/ package org.eclipse.kapua.app.api.core.exception; import org.eclipse.kapua.KapuaException; import org.eclipse.kapua.app.api.core.exception.model.KapuaExceptionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; @Provider public class KapuaExceptionMapper implements ExceptionMapper<KapuaException> { private static final Logger LOG = LoggerFactory.getLogger(KapuaExceptionMapper.class); @Override public Response toResponse(KapuaException kapuaException) { LOG.error("Generic Kapua exception!", kapuaException); return Response .status(Status.UNAUTHORIZED) .entity(new KapuaExceptionInfo(Status.INTERNAL_SERVER_ERROR, kapuaException.getCode(), kapuaException)) .build(); } }
/******************************************************************************* * Copyright (c) 2018 Eurotech and/or its affiliates and others * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech - initial API and implementation *******************************************************************************/ package org.eclipse.kapua.app.api.core.exception; import org.eclipse.kapua.KapuaException; import org.eclipse.kapua.app.api.core.exception.model.KapuaExceptionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; @Provider public class KapuaExceptionMapper implements ExceptionMapper<KapuaException> { private static final Logger LOG = LoggerFactory.getLogger(KapuaExceptionMapper.class); @Override public Response toResponse(KapuaException kapuaException) { LOG.error("Generic Kapua exception!", kapuaException); return Response .serverError() .entity(new KapuaExceptionInfo(Status.INTERNAL_SERVER_ERROR, kapuaException.getCode(), kapuaException)) .build(); } }
Write correct HTTP error code in REST Internal Error
Write correct HTTP error code in REST Internal Error Signed-off-by: Claudio Mezzasalma <[email protected]>
Java
epl-1.0
stzilli/kapua,stzilli/kapua,stzilli/kapua,LeoNerdoG/kapua,stzilli/kapua,LeoNerdoG/kapua,LeoNerdoG/kapua,stzilli/kapua,LeoNerdoG/kapua,LeoNerdoG/kapua
java
## Code Before: /******************************************************************************* * Copyright (c) 2018 Eurotech and/or its affiliates and others * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech - initial API and implementation *******************************************************************************/ package org.eclipse.kapua.app.api.core.exception; import org.eclipse.kapua.KapuaException; import org.eclipse.kapua.app.api.core.exception.model.KapuaExceptionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; @Provider public class KapuaExceptionMapper implements ExceptionMapper<KapuaException> { private static final Logger LOG = LoggerFactory.getLogger(KapuaExceptionMapper.class); @Override public Response toResponse(KapuaException kapuaException) { LOG.error("Generic Kapua exception!", kapuaException); return Response .status(Status.UNAUTHORIZED) .entity(new KapuaExceptionInfo(Status.INTERNAL_SERVER_ERROR, kapuaException.getCode(), kapuaException)) .build(); } } ## Instruction: Write correct HTTP error code in REST Internal Error Signed-off-by: Claudio Mezzasalma <[email protected]> ## Code After: /******************************************************************************* * Copyright (c) 2018 Eurotech and/or its affiliates and others * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech - initial API and implementation *******************************************************************************/ package org.eclipse.kapua.app.api.core.exception; import org.eclipse.kapua.KapuaException; import org.eclipse.kapua.app.api.core.exception.model.KapuaExceptionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; @Provider public class KapuaExceptionMapper implements ExceptionMapper<KapuaException> { private static final Logger LOG = LoggerFactory.getLogger(KapuaExceptionMapper.class); @Override public Response toResponse(KapuaException kapuaException) { LOG.error("Generic Kapua exception!", kapuaException); return Response .serverError() .entity(new KapuaExceptionInfo(Status.INTERNAL_SERVER_ERROR, kapuaException.getCode(), kapuaException)) .build(); } }
... public Response toResponse(KapuaException kapuaException) { LOG.error("Generic Kapua exception!", kapuaException); return Response .serverError() .entity(new KapuaExceptionInfo(Status.INTERNAL_SERVER_ERROR, kapuaException.getCode(), kapuaException)) .build(); } ...
19952d7f437270065a693dc886c867329ec7c4a0
startzone.py
startzone.py
import xmlrpclib from supervisor.xmlrpc import SupervisorTransport def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False): s = xmlrpclib.ServerProxy('http://localhost:9001') import socket try: version = s.twiddler.getAPIVersion() except(socket.error), exc: raise UserWarning("Could not connect to supervisor: %s" % exc) if float(version) >= 0.3: command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid) settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)} try: addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings) except(xmlrpclib.Fault), exc: if "BAD_NAME" in exc.faultString: raise UserWarning("Zone already exists in process list.") else: print exc print exc.faultCode, exc.faultString raise if addtogroup: return True else: raise UserWarning("Couldn't add zone %s to process group." % zoneid) else: raise UserWarning("Twiddler version too old.") if __name__ == "__main__": if start_zone(): print "Started zone successfully."
import xmlrpclib from supervisor.xmlrpc import SupervisorTransport def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False): s = xmlrpclib.ServerProxy('http://localhost:9001') import socket try: version = s.twiddler.getAPIVersion() except(socket.error), exc: raise UserWarning("Could not connect to supervisor: %s" % exc) if float(version) >= 0.3: command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid) settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)} try: addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings) except(xmlrpclib.Fault), exc: if "BAD_NAME" in exc.faultString: raise UserWarning("Zone already exists in process list.") else: print exc print exc.faultCode, exc.faultString raise if addtogroup: return True else: raise UserWarning("Couldn't add zone %s to process group." % zoneid) else: raise UserWarning("Twiddler version too old.") if __name__ == "__main__": if start_zone(): print "Started zone successfully."
Fix up some settings for start_zone()
Fix up some settings for start_zone()
Python
agpl-3.0
cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO
python
## Code Before: import xmlrpclib from supervisor.xmlrpc import SupervisorTransport def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False): s = xmlrpclib.ServerProxy('http://localhost:9001') import socket try: version = s.twiddler.getAPIVersion() except(socket.error), exc: raise UserWarning("Could not connect to supervisor: %s" % exc) if float(version) >= 0.3: command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid) settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)} try: addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings) except(xmlrpclib.Fault), exc: if "BAD_NAME" in exc.faultString: raise UserWarning("Zone already exists in process list.") else: print exc print exc.faultCode, exc.faultString raise if addtogroup: return True else: raise UserWarning("Couldn't add zone %s to process group." % zoneid) else: raise UserWarning("Twiddler version too old.") if __name__ == "__main__": if start_zone(): print "Started zone successfully." ## Instruction: Fix up some settings for start_zone() ## Code After: import xmlrpclib from supervisor.xmlrpc import SupervisorTransport def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False): s = xmlrpclib.ServerProxy('http://localhost:9001') import socket try: version = s.twiddler.getAPIVersion() except(socket.error), exc: raise UserWarning("Could not connect to supervisor: %s" % exc) if float(version) >= 0.3: command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid) settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)} try: addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings) except(xmlrpclib.Fault), exc: if "BAD_NAME" in exc.faultString: raise UserWarning("Zone already exists in process list.") else: print exc print exc.faultCode, exc.faultString raise if addtogroup: return True else: raise UserWarning("Couldn't add zone %s to process group." % zoneid) else: raise UserWarning("Twiddler version too old.") if __name__ == "__main__": if start_zone(): print "Started zone successfully."
... raise UserWarning("Could not connect to supervisor: %s" % exc) if float(version) >= 0.3: command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid) settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)} try: addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings) except(xmlrpclib.Fault), exc: ...
65336509829a42b91b000d2e423ed4581ac61c98
app/mpv.py
app/mpv.py
import sys import json import struct import subprocess # Read a message from stdin and decode it. def getMessage(): rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, # given its content. def encodeMessage(messageContent): encodedContent = json.dumps(messageContent) encodedLength = struct.pack('@I', len(encodedContent)) return {'length': encodedLength, 'content': encodedContent} # Send an encoded message to stdout def sendMessage(encodedMessage): sys.stdout.write(encodedMessage['length']) sys.stdout.write(encodedMessage['content']) sys.stdout.flush() while True: mpv_args = getMessage() if (len(mpv_args) > 1): subprocess.call(["mpv", mpv_args])
import sys import json import struct import subprocess import shlex # Read a message from stdin and decode it. def getMessage(): rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, # given its content. def encodeMessage(messageContent): encodedContent = json.dumps(messageContent) encodedLength = struct.pack('@I', len(encodedContent)) return {'length': encodedLength, 'content': encodedContent} # Send an encoded message to stdout def sendMessage(encodedMessage): sys.stdout.write(encodedMessage['length']) sys.stdout.write(encodedMessage['content']) sys.stdout.flush() while True: mpv_args = getMessage() if (len(mpv_args) > 1): args = shlex.split("mpv " + mpv_args) subprocess.call(args) sys.exit(0)
Handle shell args in python scripts
Handle shell args in python scripts
Python
mit
vayan/external-video,vayan/external-video,vayan/external-video,vayan/external-video
python
## Code Before: import sys import json import struct import subprocess # Read a message from stdin and decode it. def getMessage(): rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, # given its content. def encodeMessage(messageContent): encodedContent = json.dumps(messageContent) encodedLength = struct.pack('@I', len(encodedContent)) return {'length': encodedLength, 'content': encodedContent} # Send an encoded message to stdout def sendMessage(encodedMessage): sys.stdout.write(encodedMessage['length']) sys.stdout.write(encodedMessage['content']) sys.stdout.flush() while True: mpv_args = getMessage() if (len(mpv_args) > 1): subprocess.call(["mpv", mpv_args]) ## Instruction: Handle shell args in python scripts ## Code After: import sys import json import struct import subprocess import shlex # Read a message from stdin and decode it. def getMessage(): rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, # given its content. def encodeMessage(messageContent): encodedContent = json.dumps(messageContent) encodedLength = struct.pack('@I', len(encodedContent)) return {'length': encodedLength, 'content': encodedContent} # Send an encoded message to stdout def sendMessage(encodedMessage): sys.stdout.write(encodedMessage['length']) sys.stdout.write(encodedMessage['content']) sys.stdout.flush() while True: mpv_args = getMessage() if (len(mpv_args) > 1): args = shlex.split("mpv " + mpv_args) subprocess.call(args) sys.exit(0)
// ... existing code ... import sys import json import struct import subprocess import shlex # Read a message from stdin and decode it. // ... modified code ... rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, ... while True: mpv_args = getMessage() if (len(mpv_args) > 1): args = shlex.split("mpv " + mpv_args) subprocess.call(args) sys.exit(0) // ... rest of the code ...
cba8ec4754ed3516ba3f873b0879c8379e8f93ab
data_structures/bitorrent/server/udp.py
data_structures/bitorrent/server/udp.py
import struct from twisted.internet.protocol import DatagramProtocol from twisted.internet import reactor class Announce(DatagramProtocol): def parse_connection(self, data): connection, action, transaction_id = struct.unpack("!qii", data) message = struct.pack('!iiq', action, transaction_id, connection) return message def parse_announce(self, data): message = struct.unpack("!qii20s20sqqqiIIiH", data) print message return message def datagramReceived(self, data, (host, port)): if len(data) < 90: data = self.parse_connection(data) else: data = self.parse_announce(data) self.transport.write(data, (host, port)) reactor.listenUDP(9999, Announce()) reactor.run()
import struct from twisted.internet.protocol import DatagramProtocol from twisted.internet import reactor from announce.torrent import Torrent class Announce(DatagramProtocol): def parse_connection(self, data): connection, action, transaction_id = struct.unpack("!qii", data) message = struct.pack('!iiq', action, transaction_id, connection) return message def parse_announce(self, data, host, port): connection_id, action, transaction_id, info_hash, peer_id, downloaded, \ left, uploaded, event, ip, key, num_want, \ port = struct.unpack("!qii40s20sqqqiIIiH", data) torrent = Torrent(info_hash) if not torrent.can_announce(peer_id): error_message = "You need to wait 5 minutes to reannounce yourself" response = struct.pack('!ii%ss' % len(error_message), action, transaction_id, error_message) else: torrent.peers = "%s:%s" % (host, port) torrent.set_announce(peer_id) response = struct.pack('!iiiii', action, transaction_id, 5 * 60, torrent.leechers, torrent.seeders) response += torrent.binary_peers return response def datagramReceived(self, data, (host, port)): if len(data) < 90: data = self.parse_connection(data) else: data = self.parse_announce(data, host, port) self.transport.write(data, (host, port)) reactor.listenUDP(9999, Announce()) reactor.run()
Send back announce response to client
Send back announce response to client
Python
apache-2.0
vtemian/university_projects,vtemian/university_projects,vtemian/university_projects
python
## Code Before: import struct from twisted.internet.protocol import DatagramProtocol from twisted.internet import reactor class Announce(DatagramProtocol): def parse_connection(self, data): connection, action, transaction_id = struct.unpack("!qii", data) message = struct.pack('!iiq', action, transaction_id, connection) return message def parse_announce(self, data): message = struct.unpack("!qii20s20sqqqiIIiH", data) print message return message def datagramReceived(self, data, (host, port)): if len(data) < 90: data = self.parse_connection(data) else: data = self.parse_announce(data) self.transport.write(data, (host, port)) reactor.listenUDP(9999, Announce()) reactor.run() ## Instruction: Send back announce response to client ## Code After: import struct from twisted.internet.protocol import DatagramProtocol from twisted.internet import reactor from announce.torrent import Torrent class Announce(DatagramProtocol): def parse_connection(self, data): connection, action, transaction_id = struct.unpack("!qii", data) message = struct.pack('!iiq', action, transaction_id, connection) return message def parse_announce(self, data, host, port): connection_id, action, transaction_id, info_hash, peer_id, downloaded, \ left, uploaded, event, ip, key, num_want, \ port = struct.unpack("!qii40s20sqqqiIIiH", data) torrent = Torrent(info_hash) if not torrent.can_announce(peer_id): error_message = "You need to wait 5 minutes to reannounce yourself" response = struct.pack('!ii%ss' % len(error_message), action, transaction_id, error_message) else: torrent.peers = "%s:%s" % (host, port) torrent.set_announce(peer_id) response = struct.pack('!iiiii', action, transaction_id, 5 * 60, torrent.leechers, torrent.seeders) response += torrent.binary_peers return response def datagramReceived(self, data, (host, port)): if len(data) < 90: data = self.parse_connection(data) else: data = self.parse_announce(data, host, port) self.transport.write(data, (host, port)) reactor.listenUDP(9999, Announce()) reactor.run()
... from twisted.internet.protocol import DatagramProtocol from twisted.internet import reactor from announce.torrent import Torrent class Announce(DatagramProtocol): ... message = struct.pack('!iiq', action, transaction_id, connection) return message def parse_announce(self, data, host, port): connection_id, action, transaction_id, info_hash, peer_id, downloaded, \ left, uploaded, event, ip, key, num_want, \ port = struct.unpack("!qii40s20sqqqiIIiH", data) torrent = Torrent(info_hash) if not torrent.can_announce(peer_id): error_message = "You need to wait 5 minutes to reannounce yourself" response = struct.pack('!ii%ss' % len(error_message), action, transaction_id, error_message) else: torrent.peers = "%s:%s" % (host, port) torrent.set_announce(peer_id) response = struct.pack('!iiiii', action, transaction_id, 5 * 60, torrent.leechers, torrent.seeders) response += torrent.binary_peers return response def datagramReceived(self, data, (host, port)): if len(data) < 90: data = self.parse_connection(data) else: data = self.parse_announce(data, host, port) self.transport.write(data, (host, port)) ...
8eb3c6aa123cecec826c3c07f98b2d2b84c265af
scrapi/registry.py
scrapi/registry.py
import sys class _Registry(dict): # These must be defined so that doctest gathering doesn't make # pytest crash when trying to figure out what/where scrapi.registry is __file__ = __file__ __name__ = __name__ def __init__(self): dict.__init__(self) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise KeyError('No harvester named "{}"'.format(key)) @property def beat_schedule(self): from celery.schedules import crontab return { 'run_{}'.format(name): { 'args': [name], 'schedule': crontab(**inst.run_at), 'task': 'scrapi.tasks.run_harvester', } for name, inst in self.items() } sys.modules[__name__] = _Registry()
import sys class _Registry(dict): # These must be defined so that doctest gathering doesn't make # pytest crash when trying to figure out what/where scrapi.registry is __file__ = __file__ __name__ = __name__ def __init__(self): dict.__init__(self) def __hash__(self): return hash(self.freeze(self)) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise KeyError('No harvester named "{}"'.format(key)) def freeze(self, o): if isinstance(o, dict): return frozenset({k: self.freeze(v) for k, v in o.items()}.items()) elif isinstance(o, list): return tuple(map(self.freeze, o)) return o @property def beat_schedule(self): from celery.schedules import crontab return { 'run_{}'.format(name): { 'args': [name], 'schedule': crontab(**inst.run_at), 'task': 'scrapi.tasks.run_harvester', } for name, inst in self.items() } sys.modules[__name__] = _Registry()
Make _Registry hashable so that django can import from scrapi
Make _Registry hashable so that django can import from scrapi
Python
apache-2.0
fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,mehanig/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi
python
## Code Before: import sys class _Registry(dict): # These must be defined so that doctest gathering doesn't make # pytest crash when trying to figure out what/where scrapi.registry is __file__ = __file__ __name__ = __name__ def __init__(self): dict.__init__(self) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise KeyError('No harvester named "{}"'.format(key)) @property def beat_schedule(self): from celery.schedules import crontab return { 'run_{}'.format(name): { 'args': [name], 'schedule': crontab(**inst.run_at), 'task': 'scrapi.tasks.run_harvester', } for name, inst in self.items() } sys.modules[__name__] = _Registry() ## Instruction: Make _Registry hashable so that django can import from scrapi ## Code After: import sys class _Registry(dict): # These must be defined so that doctest gathering doesn't make # pytest crash when trying to figure out what/where scrapi.registry is __file__ = __file__ __name__ = __name__ def __init__(self): dict.__init__(self) def __hash__(self): return hash(self.freeze(self)) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise KeyError('No harvester named "{}"'.format(key)) def freeze(self, o): if isinstance(o, dict): return frozenset({k: self.freeze(v) for k, v in o.items()}.items()) elif isinstance(o, list): return tuple(map(self.freeze, o)) return o @property def beat_schedule(self): from celery.schedules import crontab return { 'run_{}'.format(name): { 'args': [name], 'schedule': crontab(**inst.run_at), 'task': 'scrapi.tasks.run_harvester', } for name, inst in self.items() } sys.modules[__name__] = _Registry()
# ... existing code ... def __init__(self): dict.__init__(self) def __hash__(self): return hash(self.freeze(self)) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise KeyError('No harvester named "{}"'.format(key)) def freeze(self, o): if isinstance(o, dict): return frozenset({k: self.freeze(v) for k, v in o.items()}.items()) elif isinstance(o, list): return tuple(map(self.freeze, o)) return o @property def beat_schedule(self): # ... rest of the code ...
c974a2fe075accdf58148fceb3f722b144e0b8d8
diylang/types.py
diylang/types.py
class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure: def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment: def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String: """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val
class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure(object): def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment(object): def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String(object): """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val
Fix Old-style class, subclass object explicitly.
Fix Old-style class, subclass object explicitly.
Python
bsd-3-clause
kvalle/diy-lisp,kvalle/diy-lisp,kvalle/diy-lang,kvalle/diy-lang
python
## Code Before: class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure: def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment: def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String: """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val ## Instruction: Fix Old-style class, subclass object explicitly. ## Code After: class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure(object): def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment(object): def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String(object): """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val
// ... existing code ... pass class Closure(object): def __init__(self, env, params, body): raise NotImplementedError("DIY") // ... modified code ... return "<closure/%d>" % len(self.params) class Environment(object): def __init__(self, variables=None): self.bindings = variables if variables else {} ... raise NotImplementedError("DIY") class String(object): """ Simple data object for representing DIY Lang strings. // ... rest of the code ...
0061b22b7bfe41ca8e5ec39a199fc1c8fa9203d0
libhfuzz/libhfuzz.h
libhfuzz/libhfuzz.h
extern "C" { #endif /* * buf: input fuzzing data * len: size of the 'buf' data * * Return value: should return 0 */ int LLVMFuzzerTestOneInput(const uint8_t * buf, size_t len); /* * argc: ptr to main's argc * argv: ptr to main's argv * * Return value: ignored */ int LLVMFuzzerInitialize(int *argc, char ***argv); /* * * An alternative for LLVMFuzzerTestOneInput() * * buf_ptr: will be set to input fuzzing data * len_ptr: will be set to the size of the input fuzzing data */ void HF_ITER(const uint8_t ** buf_ptr, size_t * len_ptr); #if defined(_HF_ARCH_LINUX) /* * Enter Linux namespaces * * cloneFlags: see 'man unshare' */ bool linuxEnterNs(uintptr_t cloneFlags); /* * Bring network interface up * * ifacename: name of the interface, typically "lo" */ bool linuxIfaceUp(const char *ifacename); /* * Mount tmpfs over a mount point * * dst: mount point for tmfs */ bool linuxMountTmpfs(const char *dst); #endif /* defined(_HF_ARCH_LINUX) */ #ifdef __cplusplus } /* extern "C" */ #endif
extern "C" { #endif /* * buf: input fuzzing data * len: size of the 'buf' data * * Return value: should return 0 */ int LLVMFuzzerTestOneInput(const uint8_t * buf, size_t len); /* * argc: ptr to main's argc * argv: ptr to main's argv * * Return value: ignored */ int LLVMFuzzerInitialize(int *argc, char ***argv); /* * * An alternative for LLVMFuzzerTestOneInput() * * buf_ptr: will be set to input fuzzing data * len_ptr: will be set to the size of the input fuzzing data */ void HF_ITER(const uint8_t ** buf_ptr, size_t * len_ptr); #if defined(__linux__) /* * Enter Linux namespaces * * cloneFlags: see 'man unshare' */ bool linuxEnterNs(uintptr_t cloneFlags); /* * Bring network interface up * * ifacename: name of the interface, typically "lo" */ bool linuxIfaceUp(const char *ifacename); /* * Mount tmpfs over a mount point * * dst: mount point for tmfs */ bool linuxMountTmpfs(const char *dst); #endif /* defined(__linux__) */ #ifdef __cplusplus } /* extern "C" */ #endif
Change defined(_HF_ARCH_LINUX) -> defined(__linux__) for public includes
Change defined(_HF_ARCH_LINUX) -> defined(__linux__) for public includes
C
apache-2.0
google/honggfuzz,google/honggfuzz,anestisb/honggfuzz,google/honggfuzz
c
## Code Before: extern "C" { #endif /* * buf: input fuzzing data * len: size of the 'buf' data * * Return value: should return 0 */ int LLVMFuzzerTestOneInput(const uint8_t * buf, size_t len); /* * argc: ptr to main's argc * argv: ptr to main's argv * * Return value: ignored */ int LLVMFuzzerInitialize(int *argc, char ***argv); /* * * An alternative for LLVMFuzzerTestOneInput() * * buf_ptr: will be set to input fuzzing data * len_ptr: will be set to the size of the input fuzzing data */ void HF_ITER(const uint8_t ** buf_ptr, size_t * len_ptr); #if defined(_HF_ARCH_LINUX) /* * Enter Linux namespaces * * cloneFlags: see 'man unshare' */ bool linuxEnterNs(uintptr_t cloneFlags); /* * Bring network interface up * * ifacename: name of the interface, typically "lo" */ bool linuxIfaceUp(const char *ifacename); /* * Mount tmpfs over a mount point * * dst: mount point for tmfs */ bool linuxMountTmpfs(const char *dst); #endif /* defined(_HF_ARCH_LINUX) */ #ifdef __cplusplus } /* extern "C" */ #endif ## Instruction: Change defined(_HF_ARCH_LINUX) -> defined(__linux__) for public includes ## Code After: extern "C" { #endif /* * buf: input fuzzing data * len: size of the 'buf' data * * Return value: should return 0 */ int LLVMFuzzerTestOneInput(const uint8_t * buf, size_t len); /* * argc: ptr to main's argc * argv: ptr to main's argv * * Return value: ignored */ int LLVMFuzzerInitialize(int *argc, char ***argv); /* * * An alternative for LLVMFuzzerTestOneInput() * * buf_ptr: will be set to input fuzzing data * len_ptr: will be set to the size of the input fuzzing data */ void HF_ITER(const uint8_t ** buf_ptr, size_t * len_ptr); #if defined(__linux__) /* * Enter Linux namespaces * * cloneFlags: see 'man unshare' */ bool linuxEnterNs(uintptr_t cloneFlags); /* * Bring network interface up * * ifacename: name of the interface, typically "lo" */ bool linuxIfaceUp(const char *ifacename); /* * Mount tmpfs over a mount point * * dst: mount point for tmfs */ bool linuxMountTmpfs(const char *dst); #endif /* defined(__linux__) */ #ifdef __cplusplus } /* extern "C" */ #endif
// ... existing code ... */ void HF_ITER(const uint8_t ** buf_ptr, size_t * len_ptr); #if defined(__linux__) /* * Enter Linux namespaces // ... modified code ... */ bool linuxMountTmpfs(const char *dst); #endif /* defined(__linux__) */ #ifdef __cplusplus } /* extern "C" */ // ... rest of the code ...
b0f17a3780c5cb70b4bd8fa4633e60acd6ff98f5
src/vast/option.h
src/vast/option.h
namespace vast { /// An optional value of `T` with similar semantics as `std::optional`. template <typename T> class option : public cppa::option<T> { typedef cppa::option<T> super; public: #ifdef VAST_HAVE_INHERTING_CONSTRUCTORS using super::option; #else option() = default; option(T x) : super(std::move(x)) { } template <typename T0, typename T1, typename... Ts> option(T0&& x0, T1&& x1, Ts&&... args) : super(std::forward<T0>(x0), std::forward<T1>(x1), std::forward<Ts>(args)...) { } option(const option&) = default; option(option&&) = default; option& operator=(option const&) = default; option& operator=(option&&) = default; #endif T* operator->() const { CPPA_REQUIRE(valid()); return &cppa::option<T>::get(); } }; } // namespace vast #endif
namespace vast { /// An optional value of `T` with similar semantics as `std::optional`. template <typename T> class option : public cppa::option<T> { typedef cppa::option<T> super; public: #ifdef VAST_HAVE_INHERTING_CONSTRUCTORS using super::option; #else option() = default; option(T x) : super(std::move(x)) { } template <typename T0, typename T1, typename... Ts> option(T0&& x0, T1&& x1, Ts&&... args) : super(std::forward<T0>(x0), std::forward<T1>(x1), std::forward<Ts>(args)...) { } option(const option&) = default; option(option&&) = default; option& operator=(option const&) = default; option& operator=(option&&) = default; #endif constexpr T const* operator->() const { return &cppa::option<T>::get(); } }; } // namespace vast #endif
Make operator-> constexpr and return a T const*.
Make operator-> constexpr and return a T const*.
C
bsd-3-clause
mavam/vast,pmos69/vast,vast-io/vast,vast-io/vast,mavam/vast,mavam/vast,pmos69/vast,pmos69/vast,vast-io/vast,vast-io/vast,pmos69/vast,vast-io/vast,mavam/vast
c
## Code Before: namespace vast { /// An optional value of `T` with similar semantics as `std::optional`. template <typename T> class option : public cppa::option<T> { typedef cppa::option<T> super; public: #ifdef VAST_HAVE_INHERTING_CONSTRUCTORS using super::option; #else option() = default; option(T x) : super(std::move(x)) { } template <typename T0, typename T1, typename... Ts> option(T0&& x0, T1&& x1, Ts&&... args) : super(std::forward<T0>(x0), std::forward<T1>(x1), std::forward<Ts>(args)...) { } option(const option&) = default; option(option&&) = default; option& operator=(option const&) = default; option& operator=(option&&) = default; #endif T* operator->() const { CPPA_REQUIRE(valid()); return &cppa::option<T>::get(); } }; } // namespace vast #endif ## Instruction: Make operator-> constexpr and return a T const*. ## Code After: namespace vast { /// An optional value of `T` with similar semantics as `std::optional`. template <typename T> class option : public cppa::option<T> { typedef cppa::option<T> super; public: #ifdef VAST_HAVE_INHERTING_CONSTRUCTORS using super::option; #else option() = default; option(T x) : super(std::move(x)) { } template <typename T0, typename T1, typename... Ts> option(T0&& x0, T1&& x1, Ts&&... args) : super(std::forward<T0>(x0), std::forward<T1>(x1), std::forward<Ts>(args)...) { } option(const option&) = default; option(option&&) = default; option& operator=(option const&) = default; option& operator=(option&&) = default; #endif constexpr T const* operator->() const { return &cppa::option<T>::get(); } }; } // namespace vast #endif
... option& operator=(option&&) = default; #endif constexpr T const* operator->() const { return &cppa::option<T>::get(); } }; ...
e7ce007f4918dae0be8708e47e671d73c551cee8
src/main/java/org/roklib/webapps/uridispatching/parameter/annotation/CapturedParameter.java
src/main/java/org/roklib/webapps/uridispatching/parameter/annotation/CapturedParameter.java
package org.roklib.webapps.uridispatching.parameter.annotation; import java.lang.annotation.*; /** * @author Roland Krüger */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Documented public @interface CapturedParameter { String name(); }
package org.roklib.webapps.uridispatching.parameter.annotation; import java.lang.annotation.*; /** * @author Roland Krüger */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Documented public @interface CapturedParameter { String mapperName(); String parameterName(); }
Add two properties to annotation
Add two properties to annotation
Java
apache-2.0
rolandkrueger/uri-fragment-routing
java
## Code Before: package org.roklib.webapps.uridispatching.parameter.annotation; import java.lang.annotation.*; /** * @author Roland Krüger */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Documented public @interface CapturedParameter { String name(); } ## Instruction: Add two properties to annotation ## Code After: package org.roklib.webapps.uridispatching.parameter.annotation; import java.lang.annotation.*; /** * @author Roland Krüger */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Documented public @interface CapturedParameter { String mapperName(); String parameterName(); }
... @Target(ElementType.METHOD) @Documented public @interface CapturedParameter { String mapperName(); String parameterName(); } ...
f1957185f0d93861a8ed319223f574df8f4e838f
src/graphql_relay/node/plural.py
src/graphql_relay/node/plural.py
from typing import Any, Callable from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInputType, GraphQLOutputType, GraphQLList, GraphQLNonNull, GraphQLResolveInfo, ) def plural_identifying_root_field( arg_name: str, input_type: GraphQLInputType, output_type: GraphQLOutputType, resolve_single_input: Callable[[GraphQLResolveInfo, str], Any], description: str = None, ) -> GraphQLField: if isinstance(input_type, GraphQLNonNull): input_type = input_type.of_type input_args = { arg_name: GraphQLArgument( GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type))) ) } def resolve(_obj, info, **args): inputs = args[arg_name] return [resolve_single_input(info, input_) for input_ in inputs] return GraphQLField( GraphQLList(output_type), description=description, args=input_args, resolve=resolve, )
from typing import Any, Callable from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInputType, GraphQLOutputType, GraphQLList, GraphQLNonNull, GraphQLResolveInfo, is_non_null_type, ) def plural_identifying_root_field( arg_name: str, input_type: GraphQLInputType, output_type: GraphQLOutputType, resolve_single_input: Callable[[GraphQLResolveInfo, str], Any], description: str = None, ) -> GraphQLField: if is_non_null_type(input_type): input_type = input_type.of_type input_args = { arg_name: GraphQLArgument( GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type))) ) } def resolve(_obj, info, **args): inputs = args[arg_name] return [resolve_single_input(info, input_) for input_ in inputs] return GraphQLField( GraphQLList(output_type), description=description, args=input_args, resolve=resolve, )
Use graphql's predicate function instead of 'isinstance'
Use graphql's predicate function instead of 'isinstance' Replicates graphql/graphql-relay-js@5b428507ef246be7ca3afb3589c410874a57f9bc
Python
mit
graphql-python/graphql-relay-py
python
## Code Before: from typing import Any, Callable from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInputType, GraphQLOutputType, GraphQLList, GraphQLNonNull, GraphQLResolveInfo, ) def plural_identifying_root_field( arg_name: str, input_type: GraphQLInputType, output_type: GraphQLOutputType, resolve_single_input: Callable[[GraphQLResolveInfo, str], Any], description: str = None, ) -> GraphQLField: if isinstance(input_type, GraphQLNonNull): input_type = input_type.of_type input_args = { arg_name: GraphQLArgument( GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type))) ) } def resolve(_obj, info, **args): inputs = args[arg_name] return [resolve_single_input(info, input_) for input_ in inputs] return GraphQLField( GraphQLList(output_type), description=description, args=input_args, resolve=resolve, ) ## Instruction: Use graphql's predicate function instead of 'isinstance' Replicates graphql/graphql-relay-js@5b428507ef246be7ca3afb3589c410874a57f9bc ## Code After: from typing import Any, Callable from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInputType, GraphQLOutputType, GraphQLList, GraphQLNonNull, GraphQLResolveInfo, is_non_null_type, ) def plural_identifying_root_field( arg_name: str, input_type: GraphQLInputType, output_type: GraphQLOutputType, resolve_single_input: Callable[[GraphQLResolveInfo, str], Any], description: str = None, ) -> GraphQLField: if is_non_null_type(input_type): input_type = input_type.of_type input_args = { arg_name: GraphQLArgument( GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type))) ) } def resolve(_obj, info, **args): inputs = args[arg_name] return [resolve_single_input(info, input_) for input_ in inputs] return GraphQLField( GraphQLList(output_type), description=description, args=input_args, resolve=resolve, )
# ... existing code ... GraphQLList, GraphQLNonNull, GraphQLResolveInfo, is_non_null_type, ) # ... modified code ... resolve_single_input: Callable[[GraphQLResolveInfo, str], Any], description: str = None, ) -> GraphQLField: if is_non_null_type(input_type): input_type = input_type.of_type input_args = { arg_name: GraphQLArgument( # ... rest of the code ...
e6d28d55309cdf7c25062d469646e0671e877607
nose2/tests/functional/support/scenario/tests_in_package/pkg1/test/test_things.py
nose2/tests/functional/support/scenario/tests_in_package/pkg1/test/test_things.py
import unittest class SomeTests(unittest.TestCase): def test_ok(self): pass def test_typeerr(self): raise TypeError("oops") def test_failed(self): print("Hello stdout") assert False, "I failed" def test_skippy(self): raise unittest.SkipTest("I wanted to skip") def test_func(): assert 1 == 1 def test_gen(): def check(a, b): assert a == b for i in range(0, 5): yield check, (i, i,) test_gen.testGenerator = True def test_gen_nose_style(): def check(a, b): assert a == b for i in range(0, 5): yield check, i, i did_setup = False def setup(): global did_setup did_setup = True def test_fixt(): assert did_setup test_fixt.setup = setup
import unittest class SomeTests(unittest.TestCase): def test_ok(self): pass def test_typeerr(self): raise TypeError("oops") def test_failed(self): print("Hello stdout") assert False, "I failed" def test_skippy(self): raise unittest.SkipTest("I wanted to skip") def test_gen_method(self): def check(x): assert x == 1 yield check, 1 yield check, 2 def test_params_method(self, a): self.assertEqual(a, 1) test_params_method.paramList = (1, 2) def test_func(): assert 1 == 1 def test_gen(): def check(a, b): assert a == b for i in range(0, 5): yield check, (i, i,) test_gen.testGenerator = True def test_gen_nose_style(): def check(a, b): assert a == b for i in range(0, 5): yield check, i, i did_setup = False def setup(): global did_setup did_setup = True def test_fixt(): assert did_setup test_fixt.setup = setup def test_params_func(a): assert a == 1 test_params_func.paramList = (1, 2) def test_params_func_multi_arg(a, b): assert a == b test_params_func_multi_arg.paramList = ((1, 1), (1, 2), (2, 2))
Add param test cases to func test target project
Add param test cases to func test target project
Python
bsd-2-clause
ojengwa/nose2,ezigman/nose2,ezigman/nose2,leth/nose2,leth/nose2,little-dude/nose2,ptthiem/nose2,ptthiem/nose2,little-dude/nose2,ojengwa/nose2
python
## Code Before: import unittest class SomeTests(unittest.TestCase): def test_ok(self): pass def test_typeerr(self): raise TypeError("oops") def test_failed(self): print("Hello stdout") assert False, "I failed" def test_skippy(self): raise unittest.SkipTest("I wanted to skip") def test_func(): assert 1 == 1 def test_gen(): def check(a, b): assert a == b for i in range(0, 5): yield check, (i, i,) test_gen.testGenerator = True def test_gen_nose_style(): def check(a, b): assert a == b for i in range(0, 5): yield check, i, i did_setup = False def setup(): global did_setup did_setup = True def test_fixt(): assert did_setup test_fixt.setup = setup ## Instruction: Add param test cases to func test target project ## Code After: import unittest class SomeTests(unittest.TestCase): def test_ok(self): pass def test_typeerr(self): raise TypeError("oops") def test_failed(self): print("Hello stdout") assert False, "I failed" def test_skippy(self): raise unittest.SkipTest("I wanted to skip") def test_gen_method(self): def check(x): assert x == 1 yield check, 1 yield check, 2 def test_params_method(self, a): self.assertEqual(a, 1) test_params_method.paramList = (1, 2) def test_func(): assert 1 == 1 def test_gen(): def check(a, b): assert a == b for i in range(0, 5): yield check, (i, i,) test_gen.testGenerator = True def test_gen_nose_style(): def check(a, b): assert a == b for i in range(0, 5): yield check, i, i did_setup = False def setup(): global did_setup did_setup = True def test_fixt(): assert did_setup test_fixt.setup = setup def test_params_func(a): assert a == 1 test_params_func.paramList = (1, 2) def test_params_func_multi_arg(a, b): assert a == b test_params_func_multi_arg.paramList = ((1, 1), (1, 2), (2, 2))
# ... existing code ... import unittest class SomeTests(unittest.TestCase): # ... modified code ... def test_skippy(self): raise unittest.SkipTest("I wanted to skip") def test_gen_method(self): def check(x): assert x == 1 yield check, 1 yield check, 2 def test_params_method(self, a): self.assertEqual(a, 1) test_params_method.paramList = (1, 2) def test_func(): ... assert did_setup test_fixt.setup = setup def test_params_func(a): assert a == 1 test_params_func.paramList = (1, 2) def test_params_func_multi_arg(a, b): assert a == b test_params_func_multi_arg.paramList = ((1, 1), (1, 2), (2, 2)) # ... rest of the code ...
80edef487835406b6496dfdce2c6ffb55fd72a60
app/src/main/java/nl/dionsegijn/konfettidemo/configurations/settings/ConfigurationManager.kt
app/src/main/java/nl/dionsegijn/konfettidemo/configurations/settings/ConfigurationManager.kt
package nl.dionsegijn.konfettidemo.configurations.settings import nl.dionsegijn.konfettidemo.R /** * Created by dionsegijn on 5/24/17. */ class ConfigurationManager { var active: Configuration var configurations: List<Configuration> = listOf( Configuration(Configuration.TYPE_STREAM_FROM_TOP, "Top", R.string.stream_from_top_instructions, R.drawable.ic_confetti_ball), Configuration(Configuration.TYPE_DRAG_AND_SHOOT, "Drag 'n Shoot", R.string.drag_and_shoot_app_name_instructions, R.drawable.ic_celebration), Configuration(Configuration.TYPE_BURST_FROM_CENTER, "Burst", R.string.burst_from_center_instructions, R.drawable.ic_fireworks) ) init { active = configurations[0] /** Specific settings for TYPE_BURST_FROM_CENTER configuration */ configurations[2].maxSpeed = 8f configurations[2].timeToLive = 4000L } }
package nl.dionsegijn.konfettidemo.configurations.settings import nl.dionsegijn.konfettidemo.R /** * Created by dionsegijn on 5/24/17. */ class ConfigurationManager { var active: Configuration var configurations: List<Configuration> = listOf( Configuration(Configuration.TYPE_STREAM_FROM_TOP, "Top", R.string.stream_from_top_instructions, R.drawable.ic_confetti_ball), Configuration(Configuration.TYPE_DRAG_AND_SHOOT, "Drag 'n Shoot", R.string.drag_and_shoot_app_name_instructions, R.drawable.ic_celebration), Configuration(Configuration.TYPE_BURST_FROM_CENTER, "Burst", R.string.burst_from_center_instructions, R.drawable.ic_fireworks) ) init { active = configurations[0] /** Specific settings for TYPE_BURST_FROM_CENTER configuration */ configurations[2].minSpeed = 1f configurations[2].maxSpeed = 8f configurations[2].timeToLive = 4000L configurations[2].colors = intArrayOf(R.color.lt_yellow, R.color.lt_orange, R.color.lt_pink, R.color.dk_cyan, R.color.dk_green) } }
Change init configuration for burst mode
Change init configuration for burst mode
Kotlin
isc
DanielMartinus/Konfetti
kotlin
## Code Before: package nl.dionsegijn.konfettidemo.configurations.settings import nl.dionsegijn.konfettidemo.R /** * Created by dionsegijn on 5/24/17. */ class ConfigurationManager { var active: Configuration var configurations: List<Configuration> = listOf( Configuration(Configuration.TYPE_STREAM_FROM_TOP, "Top", R.string.stream_from_top_instructions, R.drawable.ic_confetti_ball), Configuration(Configuration.TYPE_DRAG_AND_SHOOT, "Drag 'n Shoot", R.string.drag_and_shoot_app_name_instructions, R.drawable.ic_celebration), Configuration(Configuration.TYPE_BURST_FROM_CENTER, "Burst", R.string.burst_from_center_instructions, R.drawable.ic_fireworks) ) init { active = configurations[0] /** Specific settings for TYPE_BURST_FROM_CENTER configuration */ configurations[2].maxSpeed = 8f configurations[2].timeToLive = 4000L } } ## Instruction: Change init configuration for burst mode ## Code After: package nl.dionsegijn.konfettidemo.configurations.settings import nl.dionsegijn.konfettidemo.R /** * Created by dionsegijn on 5/24/17. */ class ConfigurationManager { var active: Configuration var configurations: List<Configuration> = listOf( Configuration(Configuration.TYPE_STREAM_FROM_TOP, "Top", R.string.stream_from_top_instructions, R.drawable.ic_confetti_ball), Configuration(Configuration.TYPE_DRAG_AND_SHOOT, "Drag 'n Shoot", R.string.drag_and_shoot_app_name_instructions, R.drawable.ic_celebration), Configuration(Configuration.TYPE_BURST_FROM_CENTER, "Burst", R.string.burst_from_center_instructions, R.drawable.ic_fireworks) ) init { active = configurations[0] /** Specific settings for TYPE_BURST_FROM_CENTER configuration */ configurations[2].minSpeed = 1f configurations[2].maxSpeed = 8f configurations[2].timeToLive = 4000L configurations[2].colors = intArrayOf(R.color.lt_yellow, R.color.lt_orange, R.color.lt_pink, R.color.dk_cyan, R.color.dk_green) } }
# ... existing code ... active = configurations[0] /** Specific settings for TYPE_BURST_FROM_CENTER configuration */ configurations[2].minSpeed = 1f configurations[2].maxSpeed = 8f configurations[2].timeToLive = 4000L configurations[2].colors = intArrayOf(R.color.lt_yellow, R.color.lt_orange, R.color.lt_pink, R.color.dk_cyan, R.color.dk_green) } } # ... rest of the code ...
1010cb2c4a4930254e2586949314aa0bb6b89b3d
tests/test_solver_constraint.py
tests/test_solver_constraint.py
import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c]
import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c] def test_default_constraint_can_not_solve(): v = Variable() c = Constraint(v) with pytest.raises(NotImplementedError): c.solve()
Test default case for constraint.solve()
Test default case for constraint.solve()
Python
lgpl-2.1
amolenaar/gaphas
python
## Code Before: import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c] ## Instruction: Test default case for constraint.solve() ## Code After: import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c] def test_default_constraint_can_not_solve(): v = Variable() c = Constraint(v) with pytest.raises(NotImplementedError): c.solve()
... v.value = 3 assert handler.events == [c] def test_default_constraint_can_not_solve(): v = Variable() c = Constraint(v) with pytest.raises(NotImplementedError): c.solve() ...
273f829c86435982bb46bd7d0c2f8be9569ce2f6
guice/common/src/main/java/com/peterphi/std/guice/common/serviceprops/ConfigRef.java
guice/common/src/main/java/com/peterphi/std/guice/common/serviceprops/ConfigRef.java
package com.peterphi.std.guice.common.serviceprops; import com.google.inject.Provider; import com.peterphi.std.guice.common.serviceprops.typed.TypedConfigRef; import org.apache.commons.configuration.Configuration; public class ConfigRef implements Provider<String> { private final Configuration configuration; private final String name; public ConfigRef(final Configuration configuration, final String name) { this.configuration = configuration; this.name = name; } @Override public String get() { return configuration.getString(name); } public <T> T get(Class<T> clazz) { return as(clazz).get(); } public <T> TypedConfigRef<T> as(final Class<T> clazz) { return new TypedConfigRef<>(this, clazz); } }
package com.peterphi.std.guice.common.serviceprops; import com.google.inject.Provider; import com.peterphi.std.guice.common.serviceprops.typed.TypedConfigRef; import org.apache.commons.configuration.Configuration; import java.util.List; public class ConfigRef implements Provider<String> { private final Configuration configuration; private final String name; public ConfigRef(final Configuration configuration, final String name) { this.configuration = configuration; this.name = name; } @Override public String get() { return configuration.getString(name); } public String[] getStringArray() { return configuration.getStringArray(name); } public List<Object> getList() { return configuration.getList(name); } public Object getObject() { return configuration.getProperty(name); } public <T> T get(Class<T> clazz) { return as(clazz).get(); } public <T> TypedConfigRef<T> as(final Class<T> clazz) { return new TypedConfigRef<>(this, clazz); } }
Add methods to retrieve a string[] as well as the underlying object if needed
Add methods to retrieve a string[] as well as the underlying object if needed
Java
mit
petergeneric/stdlib,petergeneric/stdlib,petergeneric/stdlib
java
## Code Before: package com.peterphi.std.guice.common.serviceprops; import com.google.inject.Provider; import com.peterphi.std.guice.common.serviceprops.typed.TypedConfigRef; import org.apache.commons.configuration.Configuration; public class ConfigRef implements Provider<String> { private final Configuration configuration; private final String name; public ConfigRef(final Configuration configuration, final String name) { this.configuration = configuration; this.name = name; } @Override public String get() { return configuration.getString(name); } public <T> T get(Class<T> clazz) { return as(clazz).get(); } public <T> TypedConfigRef<T> as(final Class<T> clazz) { return new TypedConfigRef<>(this, clazz); } } ## Instruction: Add methods to retrieve a string[] as well as the underlying object if needed ## Code After: package com.peterphi.std.guice.common.serviceprops; import com.google.inject.Provider; import com.peterphi.std.guice.common.serviceprops.typed.TypedConfigRef; import org.apache.commons.configuration.Configuration; import java.util.List; public class ConfigRef implements Provider<String> { private final Configuration configuration; private final String name; public ConfigRef(final Configuration configuration, final String name) { this.configuration = configuration; this.name = name; } @Override public String get() { return configuration.getString(name); } public String[] getStringArray() { return configuration.getStringArray(name); } public List<Object> getList() { return configuration.getList(name); } public Object getObject() { return configuration.getProperty(name); } public <T> T get(Class<T> clazz) { return as(clazz).get(); } public <T> TypedConfigRef<T> as(final Class<T> clazz) { return new TypedConfigRef<>(this, clazz); } }
... import com.google.inject.Provider; import com.peterphi.std.guice.common.serviceprops.typed.TypedConfigRef; import org.apache.commons.configuration.Configuration; import java.util.List; public class ConfigRef implements Provider<String> { ... } public String[] getStringArray() { return configuration.getStringArray(name); } public List<Object> getList() { return configuration.getList(name); } public Object getObject() { return configuration.getProperty(name); } public <T> T get(Class<T> clazz) { return as(clazz).get(); ...
011f7fbe66cc226cdd2be2e2eeef44df11733251
scrapyard/kickass.py
scrapyard/kickass.py
import cache import network import scraper import urllib KICKASS_URL = 'http://kickass.so' ################################################################################ def movie(movie_info): return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:])) ################################################################################ def episode(show_info, episode_info): return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', show_info['title'], episode_info['season_index'], episode_info['episode_index'])) ################################################################################ def __search(query): magnet_infos = [] rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' }) if rss_data: for rss_item in rss_data.entries: magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers))) return magnet_infos
import cache import network import scraper import urllib KICKASS_URL = 'http://kickass.so' ################################################################################ def movie(movie_info): return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:])) ################################################################################ def episode(show_info, episode_info): clean_title = show_info['title'].replace('?', '') return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', clean_title, episode_info['season_index'], episode_info['episode_index'])) ################################################################################ def __search(query): magnet_infos = [] rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' }) if rss_data: for rss_item in rss_data.entries: magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers))) return magnet_infos
Remove ? from show title when searching
Kickass: Remove ? from show title when searching
Python
mit
sharkone/scrapyard
python
## Code Before: import cache import network import scraper import urllib KICKASS_URL = 'http://kickass.so' ################################################################################ def movie(movie_info): return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:])) ################################################################################ def episode(show_info, episode_info): return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', show_info['title'], episode_info['season_index'], episode_info['episode_index'])) ################################################################################ def __search(query): magnet_infos = [] rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' }) if rss_data: for rss_item in rss_data.entries: magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers))) return magnet_infos ## Instruction: Kickass: Remove ? from show title when searching ## Code After: import cache import network import scraper import urllib KICKASS_URL = 'http://kickass.so' ################################################################################ def movie(movie_info): return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:])) ################################################################################ def episode(show_info, episode_info): clean_title = show_info['title'].replace('?', '') return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', clean_title, episode_info['season_index'], episode_info['episode_index'])) ################################################################################ def __search(query): magnet_infos = [] rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' }) if rss_data: for rss_item in rss_data.entries: magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers))) return magnet_infos
# ... existing code ... ################################################################################ def episode(show_info, episode_info): clean_title = show_info['title'].replace('?', '') return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', clean_title, episode_info['season_index'], episode_info['episode_index'])) ################################################################################ def __search(query): # ... rest of the code ...
eb4c308bbe2824acc1016be761dd2a9713a909a3
vlcclient/vlcmessages.py
vlcclient/vlcmessages.py
''' Minimal VLC client for AceProxy. Messages class. ''' class VlcMessage(object): class request(object): SHUTDOWN = 'shutdown' @staticmethod def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''): return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \ 'http{mux=' + muxer + ',dst=:' + \ str(out_port) + '/' + stream_name + '} enabled' + \ "\r\n" + 'control "' + stream_name + '" play' @staticmethod def stopBroadcast(stream_name): return 'del "' + stream_name + '"' @staticmethod def pauseBroadcast(stream_name): return 'control "' + stream_name + '" pause' @staticmethod def unPauseBroadcast(stream_name): return 'control "' + stream_name + '" play' class response(object): WRONGPASS = 'Wrong password' AUTHOK = 'Welcome, Master' BROADCASTEXISTS = 'Name already in use' SYNTAXERR = 'Wrong command syntax' STARTOK = 'new' STOPOK = 'del' STOPERR = 'media unknown' SHUTDOWN = 'Bye-bye!'
''' Minimal VLC client for AceProxy. Messages class. ''' class VlcMessage(object): class request(object): SHUTDOWN = 'shutdown' @staticmethod def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''): return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \ 'http{mux=' + muxer + ',dst=:' + \ str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \ "\r\n" + 'control "' + stream_name + '" play' @staticmethod def stopBroadcast(stream_name): return 'del "' + stream_name + '"' @staticmethod def pauseBroadcast(stream_name): return 'control "' + stream_name + '" pause' @staticmethod def unPauseBroadcast(stream_name): return 'control "' + stream_name + '" play' class response(object): WRONGPASS = 'Wrong password' AUTHOK = 'Welcome, Master' BROADCASTEXISTS = 'Name already in use' SYNTAXERR = 'Wrong command syntax' STARTOK = 'new' STOPOK = 'del' STOPERR = 'media unknown' SHUTDOWN = 'Bye-bye!'
Include all audio, video and subtitles streams
Include all audio, video and subtitles streams
Python
mit
deseven/aceproxy,pepsik-kiev/aceproxy,cosynus/python,Ivshti/aceproxy,ValdikSS/aceproxy
python
## Code Before: ''' Minimal VLC client for AceProxy. Messages class. ''' class VlcMessage(object): class request(object): SHUTDOWN = 'shutdown' @staticmethod def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''): return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \ 'http{mux=' + muxer + ',dst=:' + \ str(out_port) + '/' + stream_name + '} enabled' + \ "\r\n" + 'control "' + stream_name + '" play' @staticmethod def stopBroadcast(stream_name): return 'del "' + stream_name + '"' @staticmethod def pauseBroadcast(stream_name): return 'control "' + stream_name + '" pause' @staticmethod def unPauseBroadcast(stream_name): return 'control "' + stream_name + '" play' class response(object): WRONGPASS = 'Wrong password' AUTHOK = 'Welcome, Master' BROADCASTEXISTS = 'Name already in use' SYNTAXERR = 'Wrong command syntax' STARTOK = 'new' STOPOK = 'del' STOPERR = 'media unknown' SHUTDOWN = 'Bye-bye!' ## Instruction: Include all audio, video and subtitles streams ## Code After: ''' Minimal VLC client for AceProxy. Messages class. ''' class VlcMessage(object): class request(object): SHUTDOWN = 'shutdown' @staticmethod def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''): return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \ 'http{mux=' + muxer + ',dst=:' + \ str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \ "\r\n" + 'control "' + stream_name + '" play' @staticmethod def stopBroadcast(stream_name): return 'del "' + stream_name + '"' @staticmethod def pauseBroadcast(stream_name): return 'control "' + stream_name + '" pause' @staticmethod def unPauseBroadcast(stream_name): return 'control "' + stream_name + '" play' class response(object): WRONGPASS = 'Wrong password' AUTHOK = 'Welcome, Master' BROADCASTEXISTS = 'Name already in use' SYNTAXERR = 'Wrong command syntax' STARTOK = 'new' STOPOK = 'del' STOPERR = 'media unknown' SHUTDOWN = 'Bye-bye!'
// ... existing code ... def startBroadcast(stream_name, input, out_port, muxer='ts', pre_access=''): return 'new "' + stream_name + '" broadcast input "' + input + '" output ' + (pre_access + ':' if pre_access else '#') + \ 'http{mux=' + muxer + ',dst=:' + \ str(out_port) + '/' + stream_name + '} option sout-keep option sout-all enabled' + \ "\r\n" + 'control "' + stream_name + '" play' @staticmethod // ... rest of the code ...
810961f65c37d27c5e2d99cf102064d0b4e300f3
project/apiv2/views.py
project/apiv2/views.py
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
Use ListCreateAPIView as base class to support bookmark creation
Use ListCreateAPIView as base class to support bookmark creation
Python
mit
hnakamur/django-bootstrap-table-example,hnakamur/django-bootstrap-table-example,hnakamur/django-bootstrap-table-example
python
## Code Before: from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id' ## Instruction: Use ListCreateAPIView as base class to support bookmark creation ## Code After: from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
... from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' ...
ea3949ce981b558e47c28ca09a56095368ddf34a
src/memdumper.c
src/memdumper.c
static const char HEX[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', }; void byteToHex(char byte, char* output) { output[0] = HEX[(byte >> 4) & 0x0F]; output[1] = HEX[byte & 0x0F]; } void dumpMemoryAsHex(char* data, char* dataEnd, int count) { static char hexbuff[2]; int i = 0; while(i < count) { if(&data[i] >= dataEnd) break; byteToHex(data[i++], hexbuff); printf("%s ", hexbuff); } /* Print padding */ for( ; i < count; i++) { printf(".. "); } } void dumpMemoryAsASCII(char* data, char* dataEnd, int count) { int i = 0; while(i < count) { char c; if(&data[i] >= dataEnd) break; c = data[i++]; printf("%c", (c < ' ' || c == (char)(127)) ? '.' : c); } /* Print padding */ for( ; i < count; i++) { printf("."); } }
static const char HEX[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', }; void byteToHex(char byte, char* output) { output[0] = HEX[(byte >> 4) & 0x0F]; output[1] = HEX[byte & 0x0F]; } void dumpMemoryAsHex(char* data, char* dataEnd, int count) { static char hexbuff[3] = {0, 0, 0}; int i = 0; while(i < count) { if(&data[i] >= dataEnd) break; byteToHex(data[i++], hexbuff); printf("%s ", hexbuff); } /* Print padding */ for( ; i < count; i++) { printf(".. "); } } void dumpMemoryAsASCII(char* data, char* dataEnd, int count) { int i = 0; while(i < count) { char c; if(&data[i] >= dataEnd) break; c = data[i++]; printf("%c", (c < ' ' || c == (char)(127)) ? '.' : c); } /* Print padding */ for( ; i < count; i++) { printf("."); } }
Make hex string buffer guarantee a null terminator
Make hex string buffer guarantee a null terminator
C
mit
drdanick/apricos-fs-manager
c
## Code Before: static const char HEX[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', }; void byteToHex(char byte, char* output) { output[0] = HEX[(byte >> 4) & 0x0F]; output[1] = HEX[byte & 0x0F]; } void dumpMemoryAsHex(char* data, char* dataEnd, int count) { static char hexbuff[2]; int i = 0; while(i < count) { if(&data[i] >= dataEnd) break; byteToHex(data[i++], hexbuff); printf("%s ", hexbuff); } /* Print padding */ for( ; i < count; i++) { printf(".. "); } } void dumpMemoryAsASCII(char* data, char* dataEnd, int count) { int i = 0; while(i < count) { char c; if(&data[i] >= dataEnd) break; c = data[i++]; printf("%c", (c < ' ' || c == (char)(127)) ? '.' : c); } /* Print padding */ for( ; i < count; i++) { printf("."); } } ## Instruction: Make hex string buffer guarantee a null terminator ## Code After: static const char HEX[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', }; void byteToHex(char byte, char* output) { output[0] = HEX[(byte >> 4) & 0x0F]; output[1] = HEX[byte & 0x0F]; } void dumpMemoryAsHex(char* data, char* dataEnd, int count) { static char hexbuff[3] = {0, 0, 0}; int i = 0; while(i < count) { if(&data[i] >= dataEnd) break; byteToHex(data[i++], hexbuff); printf("%s ", hexbuff); } /* Print padding */ for( ; i < count; i++) { printf(".. "); } } void dumpMemoryAsASCII(char* data, char* dataEnd, int count) { int i = 0; while(i < count) { char c; if(&data[i] >= dataEnd) break; c = data[i++]; printf("%c", (c < ' ' || c == (char)(127)) ? '.' : c); } /* Print padding */ for( ; i < count; i++) { printf("."); } }
# ... existing code ... } void dumpMemoryAsHex(char* data, char* dataEnd, int count) { static char hexbuff[3] = {0, 0, 0}; int i = 0; while(i < count) { if(&data[i] >= dataEnd) # ... rest of the code ...
75a598e2b9cf237448cd1b1934d3d58d093808ec
server/scraper/util.py
server/scraper/util.py
import os import re import sys import json def parse_money(moneystring): # Sometimes 0 is O :( moneystring = moneystring.replace("O", "0") return re.sub("[^0-9,]", "", str(moneystring)).replace(',', '.') def stderr_print(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def write_json_to_file(obj, path): """ Write an object to JSON at the specified path. """ directory = os.path.dirname(path) os.makedirs(directory, exist_ok=True) with open(path, mode='w') as f: json.dump(obj, f, sort_keys=True) def split_price(meal): price = meal.split('-')[-1].strip() name = '-'.join(meal.split('-')[:-1]).strip() return name, price
import os import re import sys import json def parse_money(moneystring): # Sometimes 0 is O :( moneystring = moneystring.replace("O", "0") return re.sub("[^0-9,]", "", str(moneystring)).replace(',', '.') def stderr_print(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def write_json_to_file(obj, path): """ Write an object to JSON at the specified path. """ directory = os.path.dirname(path) os.makedirs(directory, exist_ok=True) with open(path, mode='w') as f: json.dump(obj, f, sort_keys=True) def split_price(meal): if "-" in meal: price = meal.split('-')[-1].strip() name = '-'.join(meal.split('-')[:-1]).strip() return name, price else: return meal.strip(), ""
Fix price in de brug
Fix price in de brug
Python
mit
ZeusWPI/hydra,ZeusWPI/hydra,ZeusWPI/hydra
python
## Code Before: import os import re import sys import json def parse_money(moneystring): # Sometimes 0 is O :( moneystring = moneystring.replace("O", "0") return re.sub("[^0-9,]", "", str(moneystring)).replace(',', '.') def stderr_print(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def write_json_to_file(obj, path): """ Write an object to JSON at the specified path. """ directory = os.path.dirname(path) os.makedirs(directory, exist_ok=True) with open(path, mode='w') as f: json.dump(obj, f, sort_keys=True) def split_price(meal): price = meal.split('-')[-1].strip() name = '-'.join(meal.split('-')[:-1]).strip() return name, price ## Instruction: Fix price in de brug ## Code After: import os import re import sys import json def parse_money(moneystring): # Sometimes 0 is O :( moneystring = moneystring.replace("O", "0") return re.sub("[^0-9,]", "", str(moneystring)).replace(',', '.') def stderr_print(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def write_json_to_file(obj, path): """ Write an object to JSON at the specified path. """ directory = os.path.dirname(path) os.makedirs(directory, exist_ok=True) with open(path, mode='w') as f: json.dump(obj, f, sort_keys=True) def split_price(meal): if "-" in meal: price = meal.split('-')[-1].strip() name = '-'.join(meal.split('-')[:-1]).strip() return name, price else: return meal.strip(), ""
# ... existing code ... def split_price(meal): if "-" in meal: price = meal.split('-')[-1].strip() name = '-'.join(meal.split('-')[:-1]).strip() return name, price else: return meal.strip(), "" # ... rest of the code ...
010040a8f7cb6a7a60b88ae80c43198fc46594d9
tests/test_integration.py
tests/test_integration.py
import os from unittest import TestCase from yoconfigurator.base import read_config from yoconfig import configure_services from pycloudflare.services import CloudFlareService app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) conf = read_config(app_dir) class ZonesTest(TestCase): def setUp(self): configure_services('cloudflare', ['cloudflare'], conf.common) self.cloudflare = CloudFlareService() def test_get_all_zones(self): zones = self.cloudflare.get_zones() self.assertIsInstance(zones, list) def test_get_zone(self): zone_id = self.cloudflare.get_zones()[0]['id'] zone = self.cloudflare.get_zone(zone_id) self.assertIsInstance(zone, dict)
import os import types from unittest import TestCase from yoconfigurator.base import read_config from yoconfig import configure_services from pycloudflare.services import CloudFlareService app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) conf = read_config(app_dir) class ZonesTest(TestCase): def setUp(self): configure_services('cloudflare', ['cloudflare'], conf.common) self.cloudflare = CloudFlareService() def test_get_all_zones(self): zones = self.cloudflare.iter_zones() self.assertIsInstance(zones, types.GeneratorType) def test_get_zone(self): zone_id = self.cloudflare.get_zones()[0]['id'] zone = self.cloudflare.get_zone(zone_id) self.assertIsInstance(zone, dict)
Test iter_zones instead of get_zones
Test iter_zones instead of get_zones
Python
mit
yola/pycloudflare,gnowxilef/pycloudflare
python
## Code Before: import os from unittest import TestCase from yoconfigurator.base import read_config from yoconfig import configure_services from pycloudflare.services import CloudFlareService app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) conf = read_config(app_dir) class ZonesTest(TestCase): def setUp(self): configure_services('cloudflare', ['cloudflare'], conf.common) self.cloudflare = CloudFlareService() def test_get_all_zones(self): zones = self.cloudflare.get_zones() self.assertIsInstance(zones, list) def test_get_zone(self): zone_id = self.cloudflare.get_zones()[0]['id'] zone = self.cloudflare.get_zone(zone_id) self.assertIsInstance(zone, dict) ## Instruction: Test iter_zones instead of get_zones ## Code After: import os import types from unittest import TestCase from yoconfigurator.base import read_config from yoconfig import configure_services from pycloudflare.services import CloudFlareService app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) conf = read_config(app_dir) class ZonesTest(TestCase): def setUp(self): configure_services('cloudflare', ['cloudflare'], conf.common) self.cloudflare = CloudFlareService() def test_get_all_zones(self): zones = self.cloudflare.iter_zones() self.assertIsInstance(zones, types.GeneratorType) def test_get_zone(self): zone_id = self.cloudflare.get_zones()[0]['id'] zone = self.cloudflare.get_zone(zone_id) self.assertIsInstance(zone, dict)
// ... existing code ... import os import types from unittest import TestCase from yoconfigurator.base import read_config // ... modified code ... self.cloudflare = CloudFlareService() def test_get_all_zones(self): zones = self.cloudflare.iter_zones() self.assertIsInstance(zones, types.GeneratorType) def test_get_zone(self): zone_id = self.cloudflare.get_zones()[0]['id'] // ... rest of the code ...
12f835d9060decfc675c81f7a1499b373b78f4cc
TrevorNet/tests/test_idx.py
TrevorNet/tests/test_idx.py
from .. import idx import os def test__count_dimensions(): yield check__count_dimensions, 9, 0 yield check__count_dimensions, [1, 2], 1 yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2 yield check__count_dimensions, [[[1,2], [2]]], 3 def check__count_dimensions(lst, i): assert idx._count_dimensions(lst) == i # these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/ _somelist = [[1, 2], [3, 4]] _somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04' def test_list_to_idx(): data = idx.list_to_idx(_somelist, 'i') assert data == _somebytes def test_idx_to_list(): lst = idx.idx_to_list(_somebytes) assert lst == _somelist
from .. import idx import os def test__count_dimensions(): yield check__count_dimensions, 9, 0 yield check__count_dimensions, [1, 2], 1 yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2 yield check__count_dimensions, [[[1,2], [2]]], 3 def check__count_dimensions(lst, i): assert idx._count_dimensions(lst) == i # these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/ _somelist = [[1, 2], [3, 4]] def _get_somebytes(): header = b'\x00\x00\x0C\x02' dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02' data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02' data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04' return header + dimensionsizes + data _somebytes = _get_somebytes() def test_list_to_idx(): data = idx.list_to_idx(_somelist, 'i') print(data, _somebytes) assert data == _somebytes def test_idx_to_list(): lst = idx.idx_to_list(_somebytes) assert lst == _somelist
Fix issue where idx test uses wrong bytes object
Fix issue where idx test uses wrong bytes object Forgot to include the sizes of each dimension
Python
mit
tmerr/trevornet
python
## Code Before: from .. import idx import os def test__count_dimensions(): yield check__count_dimensions, 9, 0 yield check__count_dimensions, [1, 2], 1 yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2 yield check__count_dimensions, [[[1,2], [2]]], 3 def check__count_dimensions(lst, i): assert idx._count_dimensions(lst) == i # these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/ _somelist = [[1, 2], [3, 4]] _somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04' def test_list_to_idx(): data = idx.list_to_idx(_somelist, 'i') assert data == _somebytes def test_idx_to_list(): lst = idx.idx_to_list(_somebytes) assert lst == _somelist ## Instruction: Fix issue where idx test uses wrong bytes object Forgot to include the sizes of each dimension ## Code After: from .. import idx import os def test__count_dimensions(): yield check__count_dimensions, 9, 0 yield check__count_dimensions, [1, 2], 1 yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2 yield check__count_dimensions, [[[1,2], [2]]], 3 def check__count_dimensions(lst, i): assert idx._count_dimensions(lst) == i # these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/ _somelist = [[1, 2], [3, 4]] def _get_somebytes(): header = b'\x00\x00\x0C\x02' dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02' data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02' data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04' return header + dimensionsizes + data _somebytes = _get_somebytes() def test_list_to_idx(): data = idx.list_to_idx(_somelist, 'i') print(data, _somebytes) assert data == _somebytes def test_idx_to_list(): lst = idx.idx_to_list(_somebytes) assert lst == _somelist
// ... existing code ... # these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/ _somelist = [[1, 2], [3, 4]] def _get_somebytes(): header = b'\x00\x00\x0C\x02' dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02' data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02' data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04' return header + dimensionsizes + data _somebytes = _get_somebytes() def test_list_to_idx(): data = idx.list_to_idx(_somelist, 'i') print(data, _somebytes) assert data == _somebytes def test_idx_to_list(): // ... rest of the code ...
3736b02d3b0004809bafb0a40625e26caffc1beb
opal/ddd.py
opal/ddd.py
from django.conf import settings import requests CHANGE_ENDPOINT = settings.DDD_ENDPOINT + 'change/' OUR_ENDPOINT = settings.DEFAULT_DOMAIN + '/ddd/' def change(pre, post): r = requests.post( CHANGE_ENDPOINT, params={'pre': pre, 'post': post, 'endpoint': OUR_ENDPOINT} ) print r.status_code print r.text return
from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder import json import requests CHANGE_ENDPOINT = settings.DDD_ENDPOINT + 'change/' OUR_ENDPOINT = settings.DEFAULT_DOMAIN + '/ddd/' def change(pre, post): payload = { 'pre': json.dumps(pre, cls=DjangoJSONEncoder), 'post': json.dumps(post, cls=DjangoJSONEncoder), 'endpoint': OUR_ENDPOINT } print payload r = requests.post( CHANGE_ENDPOINT, data=payload ) print 'status', r.status_code print 'text', r.text return
Use the real send POST params keyword.
Use the real send POST params keyword.
Python
agpl-3.0
khchine5/opal,khchine5/opal,khchine5/opal
python
## Code Before: from django.conf import settings import requests CHANGE_ENDPOINT = settings.DDD_ENDPOINT + 'change/' OUR_ENDPOINT = settings.DEFAULT_DOMAIN + '/ddd/' def change(pre, post): r = requests.post( CHANGE_ENDPOINT, params={'pre': pre, 'post': post, 'endpoint': OUR_ENDPOINT} ) print r.status_code print r.text return ## Instruction: Use the real send POST params keyword. ## Code After: from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder import json import requests CHANGE_ENDPOINT = settings.DDD_ENDPOINT + 'change/' OUR_ENDPOINT = settings.DEFAULT_DOMAIN + '/ddd/' def change(pre, post): payload = { 'pre': json.dumps(pre, cls=DjangoJSONEncoder), 'post': json.dumps(post, cls=DjangoJSONEncoder), 'endpoint': OUR_ENDPOINT } print payload r = requests.post( CHANGE_ENDPOINT, data=payload ) print 'status', r.status_code print 'text', r.text return
# ... existing code ... from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder import json import requests CHANGE_ENDPOINT = settings.DDD_ENDPOINT + 'change/' # ... modified code ... OUR_ENDPOINT = settings.DEFAULT_DOMAIN + '/ddd/' def change(pre, post): payload = { 'pre': json.dumps(pre, cls=DjangoJSONEncoder), 'post': json.dumps(post, cls=DjangoJSONEncoder), 'endpoint': OUR_ENDPOINT } print payload r = requests.post( CHANGE_ENDPOINT, data=payload ) print 'status', r.status_code print 'text', r.text return # ... rest of the code ...
9f48c98cd35fdbffe738e583f3ffe2f4bb971175
app/src/main/java/com/cmput301/cia/activities/HabitEventViewActivity.java
app/src/main/java/com/cmput301/cia/activities/HabitEventViewActivity.java
/* * Copyright (c) 2017 CMPUT301F17T15. This project is distributed under the MIT license. */ package com.cmput301.cia.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.widget.EditText; import android.widget.TextView; import com.cmput301.cia.R; /** * created by Tinghui */ public class HabitEventViewActivity extends AppCompatActivity { private TextView habitEventName; private TextView habitEventType; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_habit_event_view); habitEventName = (EditText) findViewById(R.id.EditHabitEventName); habitEventType = (EditText) findViewById(R.id.EditHabitEventType); Bundle bundle = getIntent().getExtras(); habitEventType.setText(bundle.getString("HabitEventType")); habitEventName.setText(bundle.getString("HabitEventName")); } }
/* * Copyright (c) 2017 CMPUT301F17T15. This project is distributed under the MIT license. */ package com.cmput301.cia.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.widget.EditText; import android.widget.TextView; import com.cmput301.cia.R; /** * created by Tinghui. * on 11/8/2017. */ public class HabitEventViewActivity extends AppCompatActivity { private TextView habitEventName; private TextView habitEventType; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_habit_event_view); habitEventName = (EditText) findViewById(R.id.EditHabitEventName); habitEventType = (EditText) findViewById(R.id.EditHabitEventType); Bundle bundle = getIntent().getExtras(); habitEventType.setText(bundle.getString("HabitEventType")); habitEventName.setText(bundle.getString("HabitEventName")); } }
Add habit event view list activity
Add habit event view list activity
Java
mit
CMPUT301F17T15/CIA
java
## Code Before: /* * Copyright (c) 2017 CMPUT301F17T15. This project is distributed under the MIT license. */ package com.cmput301.cia.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.widget.EditText; import android.widget.TextView; import com.cmput301.cia.R; /** * created by Tinghui */ public class HabitEventViewActivity extends AppCompatActivity { private TextView habitEventName; private TextView habitEventType; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_habit_event_view); habitEventName = (EditText) findViewById(R.id.EditHabitEventName); habitEventType = (EditText) findViewById(R.id.EditHabitEventType); Bundle bundle = getIntent().getExtras(); habitEventType.setText(bundle.getString("HabitEventType")); habitEventName.setText(bundle.getString("HabitEventName")); } } ## Instruction: Add habit event view list activity ## Code After: /* * Copyright (c) 2017 CMPUT301F17T15. This project is distributed under the MIT license. */ package com.cmput301.cia.activities; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.widget.EditText; import android.widget.TextView; import com.cmput301.cia.R; /** * created by Tinghui. * on 11/8/2017. */ public class HabitEventViewActivity extends AppCompatActivity { private TextView habitEventName; private TextView habitEventType; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_habit_event_view); habitEventName = (EditText) findViewById(R.id.EditHabitEventName); habitEventType = (EditText) findViewById(R.id.EditHabitEventType); Bundle bundle = getIntent().getExtras(); habitEventType.setText(bundle.getString("HabitEventType")); habitEventName.setText(bundle.getString("HabitEventName")); } }
# ... existing code ... import com.cmput301.cia.R; /** * created by Tinghui. * on 11/8/2017. */ public class HabitEventViewActivity extends AppCompatActivity { # ... modified code ... setContentView(R.layout.activity_habit_event_view); habitEventName = (EditText) findViewById(R.id.EditHabitEventName); habitEventType = (EditText) findViewById(R.id.EditHabitEventType); Bundle bundle = getIntent().getExtras(); habitEventType.setText(bundle.getString("HabitEventType")); habitEventName.setText(bundle.getString("HabitEventName")); # ... rest of the code ...
2713c59813cea9dd5d5787e4a21aa05471bfdb44
aztec/src/main/kotlin/org/wordpress/aztec/plugins/IToolbarButton.kt
aztec/src/main/kotlin/org/wordpress/aztec/plugins/IToolbarButton.kt
package org.wordpress.aztec.plugins import android.content.Context import android.graphics.drawable.Drawable import android.view.KeyEvent import android.view.View import android.view.ViewGroup import org.wordpress.aztec.ITextFormat import org.wordpress.aztec.toolbar.AztecToolbar import org.wordpress.aztec.toolbar.IToolbarAction import org.wordpress.aztec.toolbar.RippleToggleButton interface IToolbarButton : IAztecPlugin { val action: IToolbarAction val context: Context fun toggle() fun matchesKeyShortcut(keyCode: Int, event: KeyEvent): Boolean { return false } fun inflateButton(parent: ViewGroup) }
package org.wordpress.aztec.plugins import android.content.Context import android.graphics.drawable.Drawable import android.view.KeyEvent import android.view.View import android.view.ViewGroup import org.wordpress.aztec.ITextFormat import org.wordpress.aztec.toolbar.AztecToolbar import org.wordpress.aztec.toolbar.IToolbarAction import org.wordpress.aztec.toolbar.RippleToggleButton /** * An interface for implementing toolbar plugins. * * @property action the toolbar action type. * @property context the Android context. */ interface IToolbarButton : IAztecPlugin { val action: IToolbarAction val context: Context /** * Toggles a particular style. * * This method is called when the associated toolbar button is tapped or key shortcut is pressed. */ fun toggle() /** * Determines, whether a particular key shortcut should trigger the toolbar action. * * @return true, if the key combination matches the action shortcut, false otherwise. */ fun matchesKeyShortcut(keyCode: Int, event: KeyEvent): Boolean { return false } /** * A callback method used during the toolbar initialization. * * The implementor is responsible for implementing the inflation of a [RippleToggleButton] under the *parent*. * * @param parent view to be the parent of the generated hierarchy. */ fun inflateButton(parent: ViewGroup) }
Add documentation to the toolbar plugin interface
Add documentation to the toolbar plugin interface
Kotlin
mpl-2.0
wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android
kotlin
## Code Before: package org.wordpress.aztec.plugins import android.content.Context import android.graphics.drawable.Drawable import android.view.KeyEvent import android.view.View import android.view.ViewGroup import org.wordpress.aztec.ITextFormat import org.wordpress.aztec.toolbar.AztecToolbar import org.wordpress.aztec.toolbar.IToolbarAction import org.wordpress.aztec.toolbar.RippleToggleButton interface IToolbarButton : IAztecPlugin { val action: IToolbarAction val context: Context fun toggle() fun matchesKeyShortcut(keyCode: Int, event: KeyEvent): Boolean { return false } fun inflateButton(parent: ViewGroup) } ## Instruction: Add documentation to the toolbar plugin interface ## Code After: package org.wordpress.aztec.plugins import android.content.Context import android.graphics.drawable.Drawable import android.view.KeyEvent import android.view.View import android.view.ViewGroup import org.wordpress.aztec.ITextFormat import org.wordpress.aztec.toolbar.AztecToolbar import org.wordpress.aztec.toolbar.IToolbarAction import org.wordpress.aztec.toolbar.RippleToggleButton /** * An interface for implementing toolbar plugins. * * @property action the toolbar action type. * @property context the Android context. */ interface IToolbarButton : IAztecPlugin { val action: IToolbarAction val context: Context /** * Toggles a particular style. * * This method is called when the associated toolbar button is tapped or key shortcut is pressed. */ fun toggle() /** * Determines, whether a particular key shortcut should trigger the toolbar action. * * @return true, if the key combination matches the action shortcut, false otherwise. */ fun matchesKeyShortcut(keyCode: Int, event: KeyEvent): Boolean { return false } /** * A callback method used during the toolbar initialization. * * The implementor is responsible for implementing the inflation of a [RippleToggleButton] under the *parent*. * * @param parent view to be the parent of the generated hierarchy. */ fun inflateButton(parent: ViewGroup) }
# ... existing code ... import org.wordpress.aztec.toolbar.IToolbarAction import org.wordpress.aztec.toolbar.RippleToggleButton /** * An interface for implementing toolbar plugins. * * @property action the toolbar action type. * @property context the Android context. */ interface IToolbarButton : IAztecPlugin { val action: IToolbarAction val context: Context /** * Toggles a particular style. * * This method is called when the associated toolbar button is tapped or key shortcut is pressed. */ fun toggle() /** * Determines, whether a particular key shortcut should trigger the toolbar action. * * @return true, if the key combination matches the action shortcut, false otherwise. */ fun matchesKeyShortcut(keyCode: Int, event: KeyEvent): Boolean { return false } /** * A callback method used during the toolbar initialization. * * The implementor is responsible for implementing the inflation of a [RippleToggleButton] under the *parent*. * * @param parent view to be the parent of the generated hierarchy. */ fun inflateButton(parent: ViewGroup) } # ... rest of the code ...
50ab2ed3d8e50e5106dc486e4d20c889d6b18e82
spkg/base/package_database.py
spkg/base/package_database.py
from os.path import split, splitext from json import load f = open("packages.json") data = load(f) g = [] for p in data: pkg = { "name": p["name"], "dependencies": p["dependencies"], "version": p["version"], "download": p["download"], } g.append(pkg) from json import dump from StringIO import StringIO s = StringIO() dump(g, s, sort_keys=True, indent=4) s.seek(0) s = s.read() # Remove the trailing space s = s.replace(" \n", "\n") f = open("packages.json", "w") f.write(s)
from os.path import split, splitext from json import load f = open("packages.json") data = load(f) g = [] for p in data: pkg = { "name": p["name"], "dependencies": p["dependencies"], "version": p["version"], "download": p["download"], } g.append(pkg) from json import dump from StringIO import StringIO s = StringIO() dump(g, s, sort_keys=True, indent=4) s.seek(0) s = s.read() # Remove the trailing space s = s.replace(" \n", "\n") f = open("packages.json", "w") f.write(s) f.write("\n")
Add a new line at the end of the file
Add a new line at the end of the file
Python
bsd-3-clause
qsnake/qsnake,qsnake/qsnake
python
## Code Before: from os.path import split, splitext from json import load f = open("packages.json") data = load(f) g = [] for p in data: pkg = { "name": p["name"], "dependencies": p["dependencies"], "version": p["version"], "download": p["download"], } g.append(pkg) from json import dump from StringIO import StringIO s = StringIO() dump(g, s, sort_keys=True, indent=4) s.seek(0) s = s.read() # Remove the trailing space s = s.replace(" \n", "\n") f = open("packages.json", "w") f.write(s) ## Instruction: Add a new line at the end of the file ## Code After: from os.path import split, splitext from json import load f = open("packages.json") data = load(f) g = [] for p in data: pkg = { "name": p["name"], "dependencies": p["dependencies"], "version": p["version"], "download": p["download"], } g.append(pkg) from json import dump from StringIO import StringIO s = StringIO() dump(g, s, sort_keys=True, indent=4) s.seek(0) s = s.read() # Remove the trailing space s = s.replace(" \n", "\n") f = open("packages.json", "w") f.write(s) f.write("\n")
... s = s.replace(" \n", "\n") f = open("packages.json", "w") f.write(s) f.write("\n") ...
56902792b2a7fdd25bd64781e9e98a63db2ee348
all/__init__.py
all/__init__.py
from .help import HyperHelpCommand, HyperHelpNavigateCommand from .help import HyperHelpListener ###----------------------------------------------------------------------------
__version_tuple = (1, 0, 0) __version__ = ".".join([str(num) for num in __version_tuple]) # These are exposed to Sublime to implement the core of the help system. from .help import HyperHelpCommand, HyperHelpNavigateCommand from .help import HyperHelpListener # These are exposed to packages that may want to interface with the hyperhelp # core for use in their own packages. from .operations import package_help_scan ###---------------------------------------------------------------------------- def version(): """ Get the currently installed version of hyperhelp as a tuple. """ return __version_tuple ###----------------------------------------------------------------------------
Include a package version number
Include a package version number This includes in the core package the concept of a version number that underlying code could use to determine what version of the core it is interfacing with. This is only really needed for packages that get at the underlying core code in hyperhelp, which at the moment would only be the companion HyperHelpAuthor package. To this end (as an experiment) the code for loading in the help index files is exposed to anyone that wants to import it as a test for how this will eventually work. In particular, the idea is to put all of the symbols meant to be accessible to outside code into the hyperhelp.all module namespace (whicn is unfortunate but there seems to be no satisfactory way around it).
Python
mit
OdatNurd/hyperhelp
python
## Code Before: from .help import HyperHelpCommand, HyperHelpNavigateCommand from .help import HyperHelpListener ###---------------------------------------------------------------------------- ## Instruction: Include a package version number This includes in the core package the concept of a version number that underlying code could use to determine what version of the core it is interfacing with. This is only really needed for packages that get at the underlying core code in hyperhelp, which at the moment would only be the companion HyperHelpAuthor package. To this end (as an experiment) the code for loading in the help index files is exposed to anyone that wants to import it as a test for how this will eventually work. In particular, the idea is to put all of the symbols meant to be accessible to outside code into the hyperhelp.all module namespace (whicn is unfortunate but there seems to be no satisfactory way around it). ## Code After: __version_tuple = (1, 0, 0) __version__ = ".".join([str(num) for num in __version_tuple]) # These are exposed to Sublime to implement the core of the help system. from .help import HyperHelpCommand, HyperHelpNavigateCommand from .help import HyperHelpListener # These are exposed to packages that may want to interface with the hyperhelp # core for use in their own packages. from .operations import package_help_scan ###---------------------------------------------------------------------------- def version(): """ Get the currently installed version of hyperhelp as a tuple. """ return __version_tuple ###----------------------------------------------------------------------------
... __version_tuple = (1, 0, 0) __version__ = ".".join([str(num) for num in __version_tuple]) # These are exposed to Sublime to implement the core of the help system. from .help import HyperHelpCommand, HyperHelpNavigateCommand from .help import HyperHelpListener # These are exposed to packages that may want to interface with the hyperhelp # core for use in their own packages. from .operations import package_help_scan ###---------------------------------------------------------------------------- def version(): """ Get the currently installed version of hyperhelp as a tuple. """ return __version_tuple ###---------------------------------------------------------------------------- ...
76dcb98842c92b8f84355f17348bb4208e5ff319
src/main/java/org/frawa/elmtest/run/ElmTestConfigurationFactory.java
src/main/java/org/frawa/elmtest/run/ElmTestConfigurationFactory.java
package org.frawa.elmtest.run; import com.intellij.execution.configurations.ConfigurationFactory; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.IconLoader; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class ElmTestConfigurationFactory extends ConfigurationFactory { private static final String FACTORY_NAME = "Elm Test configuration factory"; static final Icon RUN_ICON = IconLoader.getIcon("/icons/elm-colorful-original.svg"); ElmTestConfigurationFactory(ConfigurationType type) { super(type); } @NotNull @Override public RunConfiguration createTemplateConfiguration(@NotNull Project project) { return new ElmTestRunConfiguration(project, this, "Elm Test"); } @Override public String getName() { return FACTORY_NAME; } @Nullable @Override public Icon getIcon() { return RUN_ICON; } }
package org.frawa.elmtest.run; import com.intellij.execution.configurations.ConfigurationFactory; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.project.Project; import org.elm.ide.icons.ElmIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class ElmTestConfigurationFactory extends ConfigurationFactory { private static final String FACTORY_NAME = "Elm Test configuration factory"; static final Icon RUN_ICON = ElmIcons.INSTANCE.getCOLORFUL(); ElmTestConfigurationFactory(ConfigurationType type) { super(type); } @NotNull @Override public RunConfiguration createTemplateConfiguration(@NotNull Project project) { return new ElmTestRunConfiguration(project, this, "Elm Test"); } @Override public String getName() { return FACTORY_NAME; } @Nullable @Override public Icon getIcon() { return RUN_ICON; } }
Fix icon (SVG was being rendered HUGE in WebStorm)
Fix icon (SVG was being rendered HUGE in WebStorm)
Java
mit
klazuka/intellij-elm,klazuka/intellij-elm
java
## Code Before: package org.frawa.elmtest.run; import com.intellij.execution.configurations.ConfigurationFactory; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.IconLoader; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class ElmTestConfigurationFactory extends ConfigurationFactory { private static final String FACTORY_NAME = "Elm Test configuration factory"; static final Icon RUN_ICON = IconLoader.getIcon("/icons/elm-colorful-original.svg"); ElmTestConfigurationFactory(ConfigurationType type) { super(type); } @NotNull @Override public RunConfiguration createTemplateConfiguration(@NotNull Project project) { return new ElmTestRunConfiguration(project, this, "Elm Test"); } @Override public String getName() { return FACTORY_NAME; } @Nullable @Override public Icon getIcon() { return RUN_ICON; } } ## Instruction: Fix icon (SVG was being rendered HUGE in WebStorm) ## Code After: package org.frawa.elmtest.run; import com.intellij.execution.configurations.ConfigurationFactory; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.project.Project; import org.elm.ide.icons.ElmIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class ElmTestConfigurationFactory extends ConfigurationFactory { private static final String FACTORY_NAME = "Elm Test configuration factory"; static final Icon RUN_ICON = ElmIcons.INSTANCE.getCOLORFUL(); ElmTestConfigurationFactory(ConfigurationType type) { super(type); } @NotNull @Override public RunConfiguration createTemplateConfiguration(@NotNull Project project) { return new ElmTestRunConfiguration(project, this, "Elm Test"); } @Override public String getName() { return FACTORY_NAME; } @Nullable @Override public Icon getIcon() { return RUN_ICON; } }
// ... existing code ... import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.openapi.project.Project; import org.elm.ide.icons.ElmIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; // ... modified code ... private static final String FACTORY_NAME = "Elm Test configuration factory"; static final Icon RUN_ICON = ElmIcons.INSTANCE.getCOLORFUL(); ElmTestConfigurationFactory(ConfigurationType type) { super(type); // ... rest of the code ...
cbb59747af48ae60473f27b6de976a08a741ab54
tests/test_test_utils.py
tests/test_test_utils.py
from itertools import product from unittest import TestCase from zipline.utils.test_utils import parameter_space class TestParameterSpace(TestCase): x_args = [1, 2] y_args = [3, 4] @classmethod def setUpClass(cls): cls.xy_invocations = [] @classmethod def tearDownClass(cls): # This is the only actual test here. assert cls.xy_invocations == list(product(cls.x_args, cls.y_args)) @parameter_space(x=x_args, y=y_args) def test_xy(self, x, y): self.xy_invocations.append((x, y)) def test_nothing(self): # Ensure that there's at least one "real" test in the class, or else # our {setUp,tearDown}Class won't be called if, for example, # `parameter_space` returns None. pass
from itertools import product from unittest import TestCase from zipline.utils.test_utils import parameter_space class TestParameterSpace(TestCase): x_args = [1, 2] y_args = [3, 4] @classmethod def setUpClass(cls): cls.xy_invocations = [] cls.yx_invocations = [] @classmethod def tearDownClass(cls): # This is the only actual test here. assert cls.xy_invocations == list(product(cls.x_args, cls.y_args)) assert cls.yx_invocations == list(product(cls.y_args, cls.x_args)) @parameter_space(x=x_args, y=y_args) def test_xy(self, x, y): self.xy_invocations.append((x, y)) @parameter_space(x=x_args, y=y_args) def test_yx(self, y, x): # Ensure that product is called with args in the order that they appear # in the function's parameter list. self.yx_invocations.append((y, x)) def test_nothing(self): # Ensure that there's at least one "real" test in the class, or else # our {setUp,tearDown}Class won't be called if, for example, # `parameter_space` returns None. pass
Add test for parameter_space ordering.
TEST: Add test for parameter_space ordering.
Python
apache-2.0
magne-max/zipline-ja,florentchandelier/zipline,Scapogo/zipline,florentchandelier/zipline,bartosh/zipline,wilsonkichoi/zipline,bartosh/zipline,alphaBenj/zipline,wilsonkichoi/zipline,humdings/zipline,humdings/zipline,umuzungu/zipline,alphaBenj/zipline,enigmampc/catalyst,enigmampc/catalyst,magne-max/zipline-ja,quantopian/zipline,Scapogo/zipline,umuzungu/zipline,quantopian/zipline
python
## Code Before: from itertools import product from unittest import TestCase from zipline.utils.test_utils import parameter_space class TestParameterSpace(TestCase): x_args = [1, 2] y_args = [3, 4] @classmethod def setUpClass(cls): cls.xy_invocations = [] @classmethod def tearDownClass(cls): # This is the only actual test here. assert cls.xy_invocations == list(product(cls.x_args, cls.y_args)) @parameter_space(x=x_args, y=y_args) def test_xy(self, x, y): self.xy_invocations.append((x, y)) def test_nothing(self): # Ensure that there's at least one "real" test in the class, or else # our {setUp,tearDown}Class won't be called if, for example, # `parameter_space` returns None. pass ## Instruction: TEST: Add test for parameter_space ordering. ## Code After: from itertools import product from unittest import TestCase from zipline.utils.test_utils import parameter_space class TestParameterSpace(TestCase): x_args = [1, 2] y_args = [3, 4] @classmethod def setUpClass(cls): cls.xy_invocations = [] cls.yx_invocations = [] @classmethod def tearDownClass(cls): # This is the only actual test here. assert cls.xy_invocations == list(product(cls.x_args, cls.y_args)) assert cls.yx_invocations == list(product(cls.y_args, cls.x_args)) @parameter_space(x=x_args, y=y_args) def test_xy(self, x, y): self.xy_invocations.append((x, y)) @parameter_space(x=x_args, y=y_args) def test_yx(self, y, x): # Ensure that product is called with args in the order that they appear # in the function's parameter list. self.yx_invocations.append((y, x)) def test_nothing(self): # Ensure that there's at least one "real" test in the class, or else # our {setUp,tearDown}Class won't be called if, for example, # `parameter_space` returns None. pass
// ... existing code ... @classmethod def setUpClass(cls): cls.xy_invocations = [] cls.yx_invocations = [] @classmethod def tearDownClass(cls): # This is the only actual test here. assert cls.xy_invocations == list(product(cls.x_args, cls.y_args)) assert cls.yx_invocations == list(product(cls.y_args, cls.x_args)) @parameter_space(x=x_args, y=y_args) def test_xy(self, x, y): self.xy_invocations.append((x, y)) @parameter_space(x=x_args, y=y_args) def test_yx(self, y, x): # Ensure that product is called with args in the order that they appear # in the function's parameter list. self.yx_invocations.append((y, x)) def test_nothing(self): # Ensure that there's at least one "real" test in the class, or else // ... rest of the code ...
61ffbc5697130918aa600a031ab48ea76791acb9
server/session_context.h
server/session_context.h
namespace traffic { class SessionContext : public RequestVisitor { private: std::unique_ptr<ReplyMessage> _message; DataProvider::ptr_t _data_provider; protected: void visit(StatisticRequest const &request); void visit(SummaryRequest const &request); void visit(ErrorRequest const &request); public: bool process_data(void const *data, size_t const size); void encode_result(std::string &out); SessionContext(DataProvider::ptr_t provider); virtual ~SessionContext() { } }; } #endif
namespace traffic { /** * \brief This is the context for a request/reply session. * * This context is allocated for each request to process the data. It represents * the glue between the server, the messages and the backend. It uses the * RequestMessage interface to parse data to a request, implements the * RequestVisitor interface to select the correct backend method to query and * provide a interface to serialize the result to the wire format. */ class SessionContext : public RequestVisitor { private: std::unique_ptr<ReplyMessage> _message; DataProvider::ptr_t _data_provider; protected: void visit(StatisticRequest const &request); void visit(SummaryRequest const &request); void visit(ErrorRequest const &request); public: /** * \brief Process raw request data in wire format. * * This is the entry point for the context to process its request. It * gets the wire data of the request, transforms it to a request * instance and give it to the backend. The result from the backend will * be stored internally. * * \param data The pointer to the raw request data. * \param size The size of the request data. * \return false in case of error. */ bool process_data(void const *data, size_t const size); /** * \brief Encode the current reply data to the wire format. * * This serialize the current state (result from the backend or error * message) to the reply wire format and put the result into the given * string reference. * * \param out The string to write the result to. */ void encode_result(std::string &out); /** * \brief Creat a session context. * * This is done for every incomming data packet. * * \param provider The DataProvider to use for this request. */ SessionContext(DataProvider::ptr_t provider); virtual ~SessionContext() { } }; } #endif
Add documentation to the SessionContext
Add documentation to the SessionContext Signed-off-by: Jan Losinski <[email protected]>
C
bsd-3-clause
agdsn/traffic-service-server,agdsn/traffic-service-server
c
## Code Before: namespace traffic { class SessionContext : public RequestVisitor { private: std::unique_ptr<ReplyMessage> _message; DataProvider::ptr_t _data_provider; protected: void visit(StatisticRequest const &request); void visit(SummaryRequest const &request); void visit(ErrorRequest const &request); public: bool process_data(void const *data, size_t const size); void encode_result(std::string &out); SessionContext(DataProvider::ptr_t provider); virtual ~SessionContext() { } }; } #endif ## Instruction: Add documentation to the SessionContext Signed-off-by: Jan Losinski <[email protected]> ## Code After: namespace traffic { /** * \brief This is the context for a request/reply session. * * This context is allocated for each request to process the data. It represents * the glue between the server, the messages and the backend. It uses the * RequestMessage interface to parse data to a request, implements the * RequestVisitor interface to select the correct backend method to query and * provide a interface to serialize the result to the wire format. */ class SessionContext : public RequestVisitor { private: std::unique_ptr<ReplyMessage> _message; DataProvider::ptr_t _data_provider; protected: void visit(StatisticRequest const &request); void visit(SummaryRequest const &request); void visit(ErrorRequest const &request); public: /** * \brief Process raw request data in wire format. * * This is the entry point for the context to process its request. It * gets the wire data of the request, transforms it to a request * instance and give it to the backend. The result from the backend will * be stored internally. * * \param data The pointer to the raw request data. * \param size The size of the request data. * \return false in case of error. */ bool process_data(void const *data, size_t const size); /** * \brief Encode the current reply data to the wire format. * * This serialize the current state (result from the backend or error * message) to the reply wire format and put the result into the given * string reference. * * \param out The string to write the result to. */ void encode_result(std::string &out); /** * \brief Creat a session context. * * This is done for every incomming data packet. * * \param provider The DataProvider to use for this request. */ SessionContext(DataProvider::ptr_t provider); virtual ~SessionContext() { } }; } #endif
... namespace traffic { /** * \brief This is the context for a request/reply session. * * This context is allocated for each request to process the data. It represents * the glue between the server, the messages and the backend. It uses the * RequestMessage interface to parse data to a request, implements the * RequestVisitor interface to select the correct backend method to query and * provide a interface to serialize the result to the wire format. */ class SessionContext : public RequestVisitor { private: ... void visit(ErrorRequest const &request); public: /** * \brief Process raw request data in wire format. * * This is the entry point for the context to process its request. It * gets the wire data of the request, transforms it to a request * instance and give it to the backend. The result from the backend will * be stored internally. * * \param data The pointer to the raw request data. * \param size The size of the request data. * \return false in case of error. */ bool process_data(void const *data, size_t const size); /** * \brief Encode the current reply data to the wire format. * * This serialize the current state (result from the backend or error * message) to the reply wire format and put the result into the given * string reference. * * \param out The string to write the result to. */ void encode_result(std::string &out); /** * \brief Creat a session context. * * This is done for every incomming data packet. * * \param provider The DataProvider to use for this request. */ SessionContext(DataProvider::ptr_t provider); virtual ~SessionContext() { } }; ...
98682412dccf2a5e38f0f701dbfe452e4e87a8aa
wagtail/admin/urls/password_reset.py
wagtail/admin/urls/password_reset.py
from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
Use new-style URL paths for password reset views
Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).
Python
bsd-3-clause
torchbox/wagtail,kaedroho/wagtail,thenewguy/wagtail,zerolab/wagtail,torchbox/wagtail,thenewguy/wagtail,gasman/wagtail,wagtail/wagtail,gasman/wagtail,FlipperPA/wagtail,takeflight/wagtail,takeflight/wagtail,rsalmaso/wagtail,jnns/wagtail,wagtail/wagtail,gasman/wagtail,mixxorz/wagtail,takeflight/wagtail,jnns/wagtail,rsalmaso/wagtail,zerolab/wagtail,jnns/wagtail,mixxorz/wagtail,kaedroho/wagtail,kaedroho/wagtail,zerolab/wagtail,thenewguy/wagtail,kaedroho/wagtail,rsalmaso/wagtail,zerolab/wagtail,FlipperPA/wagtail,jnns/wagtail,torchbox/wagtail,FlipperPA/wagtail,wagtail/wagtail,torchbox/wagtail,takeflight/wagtail,mixxorz/wagtail,zerolab/wagtail,rsalmaso/wagtail,FlipperPA/wagtail,mixxorz/wagtail,thenewguy/wagtail,gasman/wagtail,mixxorz/wagtail,wagtail/wagtail,kaedroho/wagtail,wagtail/wagtail,gasman/wagtail,thenewguy/wagtail,rsalmaso/wagtail
python
## Code Before: from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ] ## Instruction: Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp). ## Code After: from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
// ... existing code ... from django.urls import path from wagtail.admin.views import account // ... modified code ... path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( // ... rest of the code ...
d675dbcab18d56ae4c2c2f05d342159c1032b7b4
polling_stations/apps/data_importers/management/commands/import_fake_exeter.py
polling_stations/apps/data_importers/management/commands/import_fake_exeter.py
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter from pathlib import Path def make_base_folder_path(): base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE") return str(base_folder_path) class Command(BaseXpressDemocracyClubCsvImporter): local_files = True base_folder_path = make_base_folder_path() council_id = "EXE" addresses_name = "Democracy_Club__02May2019exe.CSV" stations_name = "Democracy_Club__02May2019exe.CSV"
from django.contrib.gis.geos import Point from addressbase.models import UprnToCouncil from data_importers.mixins import AdvanceVotingMixin from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter from pathlib import Path from pollingstations.models import AdvanceVotingStation def make_base_folder_path(): base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE") return str(base_folder_path) class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin): local_files = True base_folder_path = make_base_folder_path() council_id = "EXE" addresses_name = "Democracy_Club__02May2019exe.CSV" stations_name = "Democracy_Club__02May2019exe.CSV" def add_advance_voting_stations(self): advance_station = AdvanceVotingStation( name="Exeter Guildhall", address="""Exeter City Council Civic Centre Paris Street Exeter Devon """, postcode="EX1 1JN", location=Point(-3.5245510056787057, 50.72486002944331, srid=4326), ) advance_station.save() UprnToCouncil.objects.filter(lad=self.council.geography.gss).update( advance_voting_station=advance_station )
Add Advance Voting stations to fake Exeter importer
Add Advance Voting stations to fake Exeter importer
Python
bsd-3-clause
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
python
## Code Before: from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter from pathlib import Path def make_base_folder_path(): base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE") return str(base_folder_path) class Command(BaseXpressDemocracyClubCsvImporter): local_files = True base_folder_path = make_base_folder_path() council_id = "EXE" addresses_name = "Democracy_Club__02May2019exe.CSV" stations_name = "Democracy_Club__02May2019exe.CSV" ## Instruction: Add Advance Voting stations to fake Exeter importer ## Code After: from django.contrib.gis.geos import Point from addressbase.models import UprnToCouncil from data_importers.mixins import AdvanceVotingMixin from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter from pathlib import Path from pollingstations.models import AdvanceVotingStation def make_base_folder_path(): base_folder_path = Path.cwd() / Path("test_data/pollingstations_data/EXE") return str(base_folder_path) class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin): local_files = True base_folder_path = make_base_folder_path() council_id = "EXE" addresses_name = "Democracy_Club__02May2019exe.CSV" stations_name = "Democracy_Club__02May2019exe.CSV" def add_advance_voting_stations(self): advance_station = AdvanceVotingStation( name="Exeter Guildhall", address="""Exeter City Council Civic Centre Paris Street Exeter Devon """, postcode="EX1 1JN", location=Point(-3.5245510056787057, 50.72486002944331, srid=4326), ) advance_station.save() UprnToCouncil.objects.filter(lad=self.council.geography.gss).update( advance_voting_station=advance_station )
... from django.contrib.gis.geos import Point from addressbase.models import UprnToCouncil from data_importers.mixins import AdvanceVotingMixin from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter from pathlib import Path from pollingstations.models import AdvanceVotingStation def make_base_folder_path(): ... return str(base_folder_path) class Command(BaseXpressDemocracyClubCsvImporter, AdvanceVotingMixin): local_files = True base_folder_path = make_base_folder_path() council_id = "EXE" addresses_name = "Democracy_Club__02May2019exe.CSV" stations_name = "Democracy_Club__02May2019exe.CSV" def add_advance_voting_stations(self): advance_station = AdvanceVotingStation( name="Exeter Guildhall", address="""Exeter City Council Civic Centre Paris Street Exeter Devon """, postcode="EX1 1JN", location=Point(-3.5245510056787057, 50.72486002944331, srid=4326), ) advance_station.save() UprnToCouncil.objects.filter(lad=self.council.geography.gss).update( advance_voting_station=advance_station ) ...
106d4d7bf624103cb96f9d7998a90e2d40969df2
DKCategories/NSData+DK.h
DKCategories/NSData+DK.h
// // NSData+DK.h // // Created by dkhamsing on 2/24/14. // #import <Foundation/Foundation.h> @interface NSData (DK) /** Load session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesLoad; /** Save session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesSave; @end
// // NSData+DK.h // // Created by dkhamsing on 2/24/14. // #import <Foundation/Foundation.h> @interface NSData (DK) /** Load session cookies. @param log Boolean that outputs with NSLog. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesLoadWithLog:(BOOL)log; /** Load session cookies without logging. */ + (void)dk_cookiesLoad; /** Save session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesSave; @end
Add log option for loading cookies
Add log option for loading cookies
C
mit
dkhamsing/DKCategories
c
## Code Before: // // NSData+DK.h // // Created by dkhamsing on 2/24/14. // #import <Foundation/Foundation.h> @interface NSData (DK) /** Load session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesLoad; /** Save session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesSave; @end ## Instruction: Add log option for loading cookies ## Code After: // // NSData+DK.h // // Created by dkhamsing on 2/24/14. // #import <Foundation/Foundation.h> @interface NSData (DK) /** Load session cookies. @param log Boolean that outputs with NSLog. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesLoadWithLog:(BOOL)log; /** Load session cookies without logging. */ + (void)dk_cookiesLoad; /** Save session cookies. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesSave; @end
# ... existing code ... /** Load session cookies. @param log Boolean that outputs with NSLog. Credits: http://stackoverflow.com/questions/14387662/afnetworking-persisting-cookies-automatically */ + (void)dk_cookiesLoadWithLog:(BOOL)log; /** Load session cookies without logging. */ + (void)dk_cookiesLoad; /** Save session cookies. # ... modified code ... + (void)dk_cookiesSave; @end # ... rest of the code ...
63ef00b976e875ce0e2f900698493a3f01db90a7
vm/src/org/mozartoz/truffle/Main.java
vm/src/org/mozartoz/truffle/Main.java
package org.mozartoz.truffle; import org.mozartoz.truffle.translator.Loader; import com.oracle.truffle.api.source.Source; public class Main { private static final String BASE_TESTS = Loader.MOZART2_DIR + "/platform-test/base/"; private static final String[] PASSING_TESTS = { BASE_TESTS + "int.oz", BASE_TESTS + "dictionary.oz", BASE_TESTS + "record.oz", BASE_TESTS + "state.oz", BASE_TESTS + "exception.oz", BASE_TESTS + "float.oz", BASE_TESTS + "conversion.oz", BASE_TESTS + "type.oz", }; public static void main(String[] args) { // Source source = Loader.createSource("test.oz"); // Loader.getInstance().run(source); Source source = Loader.createSource("simple_runner.oz"); Loader.getInstance().runFunctor(source, PASSING_TESTS); } }
package org.mozartoz.truffle; import org.mozartoz.truffle.translator.Loader; import com.oracle.truffle.api.source.Source; public class Main { private static final String BASE_TESTS = Loader.MOZART2_DIR + "/platform-test/base/"; private static final String TEST_RUNNER = Loader.MOZART2_DIR + "/platform-test/simple_runner.oz"; private static final String[] PASSING_TESTS = { BASE_TESTS + "int.oz", BASE_TESTS + "dictionary.oz", BASE_TESTS + "record.oz", BASE_TESTS + "state.oz", BASE_TESTS + "exception.oz", BASE_TESTS + "float.oz", BASE_TESTS + "conversion.oz", BASE_TESTS + "type.oz", }; public static void main(String[] args) { // Source source = Loader.createSource("test.oz"); // Loader.getInstance().run(source); Source source = Loader.createSource(TEST_RUNNER); Loader.getInstance().runFunctor(source, PASSING_TESTS); } }
Use simple_runner from the platform-test dir
Use simple_runner from the platform-test dir
Java
bsd-2-clause
eregon/mozart-graal,eregon/mozart-graal,eregon/mozart-graal,mistasse/mozart-graal,mistasse/mozart-graal,eregon/mozart-graal,eregon/mozart-graal,mistasse/mozart-graal,eregon/mozart-graal,mistasse/mozart-graal,mistasse/mozart-graal
java
## Code Before: package org.mozartoz.truffle; import org.mozartoz.truffle.translator.Loader; import com.oracle.truffle.api.source.Source; public class Main { private static final String BASE_TESTS = Loader.MOZART2_DIR + "/platform-test/base/"; private static final String[] PASSING_TESTS = { BASE_TESTS + "int.oz", BASE_TESTS + "dictionary.oz", BASE_TESTS + "record.oz", BASE_TESTS + "state.oz", BASE_TESTS + "exception.oz", BASE_TESTS + "float.oz", BASE_TESTS + "conversion.oz", BASE_TESTS + "type.oz", }; public static void main(String[] args) { // Source source = Loader.createSource("test.oz"); // Loader.getInstance().run(source); Source source = Loader.createSource("simple_runner.oz"); Loader.getInstance().runFunctor(source, PASSING_TESTS); } } ## Instruction: Use simple_runner from the platform-test dir ## Code After: package org.mozartoz.truffle; import org.mozartoz.truffle.translator.Loader; import com.oracle.truffle.api.source.Source; public class Main { private static final String BASE_TESTS = Loader.MOZART2_DIR + "/platform-test/base/"; private static final String TEST_RUNNER = Loader.MOZART2_DIR + "/platform-test/simple_runner.oz"; private static final String[] PASSING_TESTS = { BASE_TESTS + "int.oz", BASE_TESTS + "dictionary.oz", BASE_TESTS + "record.oz", BASE_TESTS + "state.oz", BASE_TESTS + "exception.oz", BASE_TESTS + "float.oz", BASE_TESTS + "conversion.oz", BASE_TESTS + "type.oz", }; public static void main(String[] args) { // Source source = Loader.createSource("test.oz"); // Loader.getInstance().run(source); Source source = Loader.createSource(TEST_RUNNER); Loader.getInstance().runFunctor(source, PASSING_TESTS); } }
// ... existing code ... public class Main { private static final String BASE_TESTS = Loader.MOZART2_DIR + "/platform-test/base/"; private static final String TEST_RUNNER = Loader.MOZART2_DIR + "/platform-test/simple_runner.oz"; private static final String[] PASSING_TESTS = { BASE_TESTS + "int.oz", // ... modified code ... // Source source = Loader.createSource("test.oz"); // Loader.getInstance().run(source); Source source = Loader.createSource(TEST_RUNNER); Loader.getInstance().runFunctor(source, PASSING_TESTS); } // ... rest of the code ...
7f13b29cc918f63c4d1fc24717c0a0b5d2f5f8ad
filter.py
filter.py
import numpy as np class LowPassFilter(object): ''' First order discrete IIR filter. ''' def __init__(self, feedback_gain, initial_value=0.0): self.feedback_gain = np.ones_like(initial_value) * feedback_gain self.initial_value = initial_value self.output_gain = 1.0 - feedback_gain self.input = np.nan self.output = initial_value self.feedback_value = initial_value / self.output_gain def filter(self, value): #if not math.isanan(value) and math.isinf(value): self.input = value self.feedback_value = value + self.feedback_gain * self.feedback_value self.output = self.output_gain * self.feedback_value return self.output class MovingAverage(object): ''' Moving average filter. ''' def __init__(self, lifetime, sampling_time): self.lifetime = lifetime self.sampling_time = sampling_time self.exp = np.exp(-sampling_time / lifetime) self.last_value = np.nan self.mean_value = np.nan def filter(self, value): self.last_value = value if np.isnan(self.mean_value): self.mean_value = value else: self.mean_value = value + self.exp * (self.mean_value - value) return self.mean_value
import numpy as np class LowPassFilter(object): ''' First order discrete IIR filter. ''' def __init__(self, feedback_gain, initial_value=0.0): self.feedback_gain = np.ones_like(initial_value) * feedback_gain self.initial_value = initial_value self.output_gain = 1.0 - feedback_gain self.input = np.nan self.output = initial_value self.feedback_value = initial_value / self.output_gain def filter(self, value): #if not math.isanan(value) and math.isinf(value): self.input = value self.feedback_value = value + self.feedback_gain * self.feedback_value self.output = self.output_gain * self.feedback_value return self.output class MovingAverage(object): ''' Moving average filter. ''' def __init__(self, lifetime, sampling_time): self.lifetime = lifetime self.sampling_time = sampling_time self.exp = np.exp(-sampling_time / lifetime) self.last_value = None self.mean_value = None def filter(self, value): self.last_value = value if self.mean_value is None: self.mean_value = value else: self.mean_value = value + self.exp * (self.mean_value - value) return self.mean_value
Fix problem with array values.
Fix problem with array values.
Python
mit
jcsharp/DriveIt
python
## Code Before: import numpy as np class LowPassFilter(object): ''' First order discrete IIR filter. ''' def __init__(self, feedback_gain, initial_value=0.0): self.feedback_gain = np.ones_like(initial_value) * feedback_gain self.initial_value = initial_value self.output_gain = 1.0 - feedback_gain self.input = np.nan self.output = initial_value self.feedback_value = initial_value / self.output_gain def filter(self, value): #if not math.isanan(value) and math.isinf(value): self.input = value self.feedback_value = value + self.feedback_gain * self.feedback_value self.output = self.output_gain * self.feedback_value return self.output class MovingAverage(object): ''' Moving average filter. ''' def __init__(self, lifetime, sampling_time): self.lifetime = lifetime self.sampling_time = sampling_time self.exp = np.exp(-sampling_time / lifetime) self.last_value = np.nan self.mean_value = np.nan def filter(self, value): self.last_value = value if np.isnan(self.mean_value): self.mean_value = value else: self.mean_value = value + self.exp * (self.mean_value - value) return self.mean_value ## Instruction: Fix problem with array values. ## Code After: import numpy as np class LowPassFilter(object): ''' First order discrete IIR filter. ''' def __init__(self, feedback_gain, initial_value=0.0): self.feedback_gain = np.ones_like(initial_value) * feedback_gain self.initial_value = initial_value self.output_gain = 1.0 - feedback_gain self.input = np.nan self.output = initial_value self.feedback_value = initial_value / self.output_gain def filter(self, value): #if not math.isanan(value) and math.isinf(value): self.input = value self.feedback_value = value + self.feedback_gain * self.feedback_value self.output = self.output_gain * self.feedback_value return self.output class MovingAverage(object): ''' Moving average filter. ''' def __init__(self, lifetime, sampling_time): self.lifetime = lifetime self.sampling_time = sampling_time self.exp = np.exp(-sampling_time / lifetime) self.last_value = None self.mean_value = None def filter(self, value): self.last_value = value if self.mean_value is None: self.mean_value = value else: self.mean_value = value + self.exp * (self.mean_value - value) return self.mean_value
// ... existing code ... self.lifetime = lifetime self.sampling_time = sampling_time self.exp = np.exp(-sampling_time / lifetime) self.last_value = None self.mean_value = None def filter(self, value): self.last_value = value if self.mean_value is None: self.mean_value = value else: self.mean_value = value + self.exp * (self.mean_value - value) // ... rest of the code ...
fcc2a190a50327a2349dfbb8e93d3157a6c1f1e8
src/sentry/utils/versioning.py
src/sentry/utils/versioning.py
from __future__ import absolute_import import warnings from collections import namedtuple from sentry.exceptions import InvalidConfiguration class Version(namedtuple('Version', 'major minor patch')): def __str__(self): return '.'.join(map(str, self)) def make_upgrade_message(service, modality, version, hosts): return '{service} {modality} be upgraded to {version} on {hosts}.'.format( hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts), modality=modality, service=service, version=version, ) def check_versions(service, versions, required, recommended=None): """ Check that all members of mapping hosts fulfill version requirements. :param service: service label, such as ``Redis`` :param versions: mapping of host to ``Version`` :param required: lowest supported ``Version``. If any host does not fulfill this requirement, an ``InvalidConfiguration`` exception is raised. :param recommended: recommended version. If any host does not fulfill this requirement, a ``PendingDeprecationWarning`` is raised. """ must_upgrade = filter(lambda (host, version): required > version, versions.items()) if must_upgrade: raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade)) if recommended: should_upgrade = filter(lambda (host, version): recommended > version, versions.items()) if should_upgrade: warnings.warn( make_upgrade_message(service, 'should', recommended, should_upgrade), PendingDeprecationWarning, )
from __future__ import absolute_import import warnings from collections import namedtuple from sentry.exceptions import InvalidConfiguration class Version(namedtuple('Version', 'major minor patch')): def __str__(self): return '.'.join(map(str, self)) def make_upgrade_message(service, modality, version, hosts): return '{service} {modality} be upgraded to {version} on {hosts}.'.format( hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts), modality=modality, service=service, version=version, ) def check_versions(service, versions, required, recommended=None): """ Check that hosts fulfill version requirements. :param service: service label, such as ``Redis`` :param versions: mapping of host to ``Version`` :param required: lowest supported ``Version``. If any host does not fulfill this requirement, an ``InvalidConfiguration`` exception is raised. :param recommended: recommended version. If any host does not fulfill this requirement, a ``PendingDeprecationWarning`` is raised. """ must_upgrade = filter(lambda (host, version): required > version, versions.items()) if must_upgrade: raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade)) if recommended: should_upgrade = filter(lambda (host, version): recommended > version, versions.items()) if should_upgrade: warnings.warn( make_upgrade_message(service, 'should', recommended, should_upgrade), PendingDeprecationWarning, )
Fix strange wording in version check docstring.
Fix strange wording in version check docstring.
Python
bsd-3-clause
zenefits/sentry,BuildingLink/sentry,ifduyue/sentry,beeftornado/sentry,beeftornado/sentry,jean/sentry,jean/sentry,ifduyue/sentry,JackDanger/sentry,nicholasserra/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,mvaled/sentry,daevaorn/sentry,jean/sentry,ifduyue/sentry,JamesMura/sentry,alexm92/sentry,daevaorn/sentry,fotinakis/sentry,imankulov/sentry,beeftornado/sentry,fotinakis/sentry,nicholasserra/sentry,mitsuhiko/sentry,zenefits/sentry,JackDanger/sentry,mvaled/sentry,gencer/sentry,gencer/sentry,jean/sentry,JamesMura/sentry,JamesMura/sentry,JackDanger/sentry,ifduyue/sentry,looker/sentry,gencer/sentry,mitsuhiko/sentry,imankulov/sentry,daevaorn/sentry,BuildingLink/sentry,nicholasserra/sentry,mvaled/sentry,BayanGroup/sentry,imankulov/sentry,BuildingLink/sentry,looker/sentry,BuildingLink/sentry,zenefits/sentry,alexm92/sentry,jean/sentry,fotinakis/sentry,alexm92/sentry,BuildingLink/sentry,BayanGroup/sentry,looker/sentry,zenefits/sentry,mvaled/sentry,gencer/sentry,JamesMura/sentry,zenefits/sentry,JamesMura/sentry,BayanGroup/sentry,looker/sentry,mvaled/sentry,daevaorn/sentry,ifduyue/sentry,looker/sentry
python
## Code Before: from __future__ import absolute_import import warnings from collections import namedtuple from sentry.exceptions import InvalidConfiguration class Version(namedtuple('Version', 'major minor patch')): def __str__(self): return '.'.join(map(str, self)) def make_upgrade_message(service, modality, version, hosts): return '{service} {modality} be upgraded to {version} on {hosts}.'.format( hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts), modality=modality, service=service, version=version, ) def check_versions(service, versions, required, recommended=None): """ Check that all members of mapping hosts fulfill version requirements. :param service: service label, such as ``Redis`` :param versions: mapping of host to ``Version`` :param required: lowest supported ``Version``. If any host does not fulfill this requirement, an ``InvalidConfiguration`` exception is raised. :param recommended: recommended version. If any host does not fulfill this requirement, a ``PendingDeprecationWarning`` is raised. """ must_upgrade = filter(lambda (host, version): required > version, versions.items()) if must_upgrade: raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade)) if recommended: should_upgrade = filter(lambda (host, version): recommended > version, versions.items()) if should_upgrade: warnings.warn( make_upgrade_message(service, 'should', recommended, should_upgrade), PendingDeprecationWarning, ) ## Instruction: Fix strange wording in version check docstring. ## Code After: from __future__ import absolute_import import warnings from collections import namedtuple from sentry.exceptions import InvalidConfiguration class Version(namedtuple('Version', 'major minor patch')): def __str__(self): return '.'.join(map(str, self)) def make_upgrade_message(service, modality, version, hosts): return '{service} {modality} be upgraded to {version} on {hosts}.'.format( hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts), modality=modality, service=service, version=version, ) def check_versions(service, versions, required, recommended=None): """ Check that hosts fulfill version requirements. :param service: service label, such as ``Redis`` :param versions: mapping of host to ``Version`` :param required: lowest supported ``Version``. If any host does not fulfill this requirement, an ``InvalidConfiguration`` exception is raised. :param recommended: recommended version. If any host does not fulfill this requirement, a ``PendingDeprecationWarning`` is raised. """ must_upgrade = filter(lambda (host, version): required > version, versions.items()) if must_upgrade: raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade)) if recommended: should_upgrade = filter(lambda (host, version): recommended > version, versions.items()) if should_upgrade: warnings.warn( make_upgrade_message(service, 'should', recommended, should_upgrade), PendingDeprecationWarning, )
# ... existing code ... def check_versions(service, versions, required, recommended=None): """ Check that hosts fulfill version requirements. :param service: service label, such as ``Redis`` :param versions: mapping of host to ``Version`` # ... rest of the code ...
d3f50efd67522f21781aaff2bdb6f2a1bf3151e0
setup.py
setup.py
from setuptools import setup, find_packages setup( name = 'dictsheet', version = '0.0.3', keywords = ('dictsheet', 'spreadsheet', 'gspread'), description = 'Dict wrapper for google spreadsheet', license = 'MIT License', install_requires = ['gspread>=0.4.1'], url = 'https://github.com/previa/dictsheet', author = 'Chandler Huang, Xander Li', author_email = '[email protected]', packages = find_packages(), platforms = 'any', )
from setuptools import setup, find_packages from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) reqs = [str(ir.req) for ir in install_reqs] # REFERENCE: # http://stackoverflow.com/questions/14399534/how-can-i-reference-requirements-txt-for-the-install-requires-kwarg-in-setuptool setup( name = 'dictsheet', version = '0.0.9', keywords = ('dictsheet', 'spreadsheet', 'gspread'), description = 'Dict wrapper for google spreadsheet', license = 'MIT License', install_requires = reqs, data_files = ['requirements.txt', 'README.md', 'LICENSE.txt'], url = 'https://github.com/previa/dictsheet', author = 'Chandler Huang, Xander Li', author_email = '[email protected]', packages = find_packages(), platforms = 'any', )
Include packages in requirements.txt to install_requires().
[Feature] Include packages in requirements.txt to install_requires().
Python
mit
previa/dictsheet
python
## Code Before: from setuptools import setup, find_packages setup( name = 'dictsheet', version = '0.0.3', keywords = ('dictsheet', 'spreadsheet', 'gspread'), description = 'Dict wrapper for google spreadsheet', license = 'MIT License', install_requires = ['gspread>=0.4.1'], url = 'https://github.com/previa/dictsheet', author = 'Chandler Huang, Xander Li', author_email = '[email protected]', packages = find_packages(), platforms = 'any', ) ## Instruction: [Feature] Include packages in requirements.txt to install_requires(). ## Code After: from setuptools import setup, find_packages from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) reqs = [str(ir.req) for ir in install_reqs] # REFERENCE: # http://stackoverflow.com/questions/14399534/how-can-i-reference-requirements-txt-for-the-install-requires-kwarg-in-setuptool setup( name = 'dictsheet', version = '0.0.9', keywords = ('dictsheet', 'spreadsheet', 'gspread'), description = 'Dict wrapper for google spreadsheet', license = 'MIT License', install_requires = reqs, data_files = ['requirements.txt', 'README.md', 'LICENSE.txt'], url = 'https://github.com/previa/dictsheet', author = 'Chandler Huang, Xander Li', author_email = '[email protected]', packages = find_packages(), platforms = 'any', )
# ... existing code ... from setuptools import setup, find_packages from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) reqs = [str(ir.req) for ir in install_reqs] # REFERENCE: # http://stackoverflow.com/questions/14399534/how-can-i-reference-requirements-txt-for-the-install-requires-kwarg-in-setuptool setup( name = 'dictsheet', version = '0.0.9', keywords = ('dictsheet', 'spreadsheet', 'gspread'), description = 'Dict wrapper for google spreadsheet', license = 'MIT License', install_requires = reqs, data_files = ['requirements.txt', 'README.md', 'LICENSE.txt'], url = 'https://github.com/previa/dictsheet', author = 'Chandler Huang, Xander Li', # ... rest of the code ...
23acd0ae9d2f2cc61722359b648580ebd984d108
mobile/src/main/java/com/ianhanniballake/contractiontimer/appwidget/AppWidgetUpdateHandler.kt
mobile/src/main/java/com/ianhanniballake/contractiontimer/appwidget/AppWidgetUpdateHandler.kt
package com.ianhanniballake.contractiontimer.appwidget import android.content.Context import android.os.Build /** * Handles updating all App Widgets */ abstract class AppWidgetUpdateHandler { companion object { /** * Creates a version appropriate AppWidgetUpdateHandler instance * * @return an appropriate AppWidgetUpdateHandler */ @JvmStatic fun createInstance(): AppWidgetUpdateHandler { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) AppWidgetUpdateHandlerV11() else AppWidgetUpdateHandlerBase() } } /** * Updates all App Widgets with the latest information. This should be called whenever a * contraction is updated * * @param context Context used to trigger updates, must not be null */ abstract fun updateAllWidgets(context: Context) }
package com.ianhanniballake.contractiontimer.appwidget import android.content.Context import android.os.Build /** * Handles updating all App Widgets */ abstract class AppWidgetUpdateHandler { companion object { /** * Creates a version appropriate AppWidgetUpdateHandler instance * * @return an appropriate AppWidgetUpdateHandler */ fun createInstance(): AppWidgetUpdateHandler { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) AppWidgetUpdateHandlerV11() else AppWidgetUpdateHandlerBase() } } /** * Updates all App Widgets with the latest information. This should be called whenever a * contraction is updated * * @param context Context used to trigger updates, must not be null */ abstract fun updateAllWidgets(context: Context) }
Remove no longer needed @JvmStatic
Remove no longer needed @JvmStatic
Kotlin
bsd-3-clause
ianhanniballake/ContractionTimer,ianhanniballake/ContractionTimer
kotlin
## Code Before: package com.ianhanniballake.contractiontimer.appwidget import android.content.Context import android.os.Build /** * Handles updating all App Widgets */ abstract class AppWidgetUpdateHandler { companion object { /** * Creates a version appropriate AppWidgetUpdateHandler instance * * @return an appropriate AppWidgetUpdateHandler */ @JvmStatic fun createInstance(): AppWidgetUpdateHandler { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) AppWidgetUpdateHandlerV11() else AppWidgetUpdateHandlerBase() } } /** * Updates all App Widgets with the latest information. This should be called whenever a * contraction is updated * * @param context Context used to trigger updates, must not be null */ abstract fun updateAllWidgets(context: Context) } ## Instruction: Remove no longer needed @JvmStatic ## Code After: package com.ianhanniballake.contractiontimer.appwidget import android.content.Context import android.os.Build /** * Handles updating all App Widgets */ abstract class AppWidgetUpdateHandler { companion object { /** * Creates a version appropriate AppWidgetUpdateHandler instance * * @return an appropriate AppWidgetUpdateHandler */ fun createInstance(): AppWidgetUpdateHandler { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) AppWidgetUpdateHandlerV11() else AppWidgetUpdateHandlerBase() } } /** * Updates all App Widgets with the latest information. This should be called whenever a * contraction is updated * * @param context Context used to trigger updates, must not be null */ abstract fun updateAllWidgets(context: Context) }
... * * @return an appropriate AppWidgetUpdateHandler */ fun createInstance(): AppWidgetUpdateHandler { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) AppWidgetUpdateHandlerV11() ...
ed7d0c5f8b64185f9fc612b44e4182b12a0fa62e
yunity/users/factories.py
yunity/users/factories.py
from django.contrib.auth import get_user_model from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory from yunity.walls.factories import Wall from yunity.utils.tests.fake import faker class User(DjangoModelFactory): class Meta: model = get_user_model() strategy = CREATE_STRATEGY is_active = True is_staff = False display_name = LazyAttribute(lambda _: faker.name()) first_name = LazyAttribute(lambda _: faker.name()) last_name = LazyAttribute(lambda _: faker.name()) email = LazyAttribute(lambda _: faker.email()) password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name)) wall = SubFactory(Wall)
from django.contrib.auth import get_user_model from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory from yunity.walls.factories import Wall from yunity.utils.tests.fake import faker class User(DjangoModelFactory): class Meta: model = get_user_model() strategy = CREATE_STRATEGY is_active = True is_staff = False display_name = LazyAttribute(lambda _: faker.name()) first_name = LazyAttribute(lambda _: faker.name()) last_name = LazyAttribute(lambda _: faker.name()) email = LazyAttribute(lambda _: faker.email()) # Use display_name as password, as it is readable password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name)) wall = SubFactory(Wall)
Comment about display_name == password
Comment about display_name == password
Python
agpl-3.0
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
python
## Code Before: from django.contrib.auth import get_user_model from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory from yunity.walls.factories import Wall from yunity.utils.tests.fake import faker class User(DjangoModelFactory): class Meta: model = get_user_model() strategy = CREATE_STRATEGY is_active = True is_staff = False display_name = LazyAttribute(lambda _: faker.name()) first_name = LazyAttribute(lambda _: faker.name()) last_name = LazyAttribute(lambda _: faker.name()) email = LazyAttribute(lambda _: faker.email()) password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name)) wall = SubFactory(Wall) ## Instruction: Comment about display_name == password ## Code After: from django.contrib.auth import get_user_model from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory from yunity.walls.factories import Wall from yunity.utils.tests.fake import faker class User(DjangoModelFactory): class Meta: model = get_user_model() strategy = CREATE_STRATEGY is_active = True is_staff = False display_name = LazyAttribute(lambda _: faker.name()) first_name = LazyAttribute(lambda _: faker.name()) last_name = LazyAttribute(lambda _: faker.name()) email = LazyAttribute(lambda _: faker.email()) # Use display_name as password, as it is readable password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name)) wall = SubFactory(Wall)
# ... existing code ... first_name = LazyAttribute(lambda _: faker.name()) last_name = LazyAttribute(lambda _: faker.name()) email = LazyAttribute(lambda _: faker.email()) # Use display_name as password, as it is readable password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name)) wall = SubFactory(Wall) # ... rest of the code ...
618bf6d4c1fc5e60b7e94d1ad1030bf2cf0de5c2
src/main/python/alppaca/server_mock/__init__.py
src/main/python/alppaca/server_mock/__init__.py
from __future__ import print_function, absolute_import, unicode_literals, division from datetime import datetime, timedelta from textwrap import dedent from bottle import Bottle import pytz """ Super simple IMS mock. Just listens on localhost:8080 for the appropriate url, returns a test role and a dummy JSON response. """ def expiration_10s_from_now(): n = datetime.now(tz=pytz.utc) + timedelta(seconds=10) return n.strftime("%Y-%m-%dT%H:%M:%SZ") class MockIms(Bottle): PATH = '/latest/meta-data/iam/security-credentials/' json_response = dedent(""" {"Code": "Success", "AccessKeyId": "ASIAI", "SecretAccessKey": "XXYYZZ", "Token": "0123456789abcdefghijklmnopqrstuvwxyzAB", "Expiration": "%s", "Type": "AWS-HMAC"} """) def __init__(self): super(MockIms, self).__init__() self.route(self.PATH, callback=self.get_roles) self.route(self.PATH + '<role>', callback=self.get_credentials) def get_roles(self): return 'test_role' def get_credentials(self, role): return self.json_response % expiration_10s_from_now() if role == 'test_role' else '' if __name__ == "__main__": MockIms().run()
from __future__ import print_function, absolute_import, unicode_literals, division from datetime import datetime, timedelta from textwrap import dedent from bottle import Bottle import pytz def expiration_10s_from_now(): n = datetime.now(tz=pytz.utc) + timedelta(seconds=10) return n.strftime("%Y-%m-%dT%H:%M:%SZ") class MockIms(Bottle): PATH = '/latest/meta-data/iam/security-credentials/' json_response = dedent(""" {"Code": "Success", "AccessKeyId": "ASIAI", "SecretAccessKey": "XXYYZZ", "Token": "0123456789abcdefghijklmnopqrstuvwxyzAB", "Expiration": "%s", "Type": "AWS-HMAC"} """) def __init__(self): super(MockIms, self).__init__() self.route(self.PATH, callback=self.get_roles) self.route(self.PATH + '<role>', callback=self.get_credentials) def get_roles(self): return 'test_role' def get_credentials(self, role): return self.json_response % expiration_10s_from_now() if role == 'test_role' else '' if __name__ == "__main__": MockIms().run()
Move string above the imports so it becomes a docstring
Move string above the imports so it becomes a docstring
Python
apache-2.0
ImmobilienScout24/afp-alppaca,ImmobilienScout24/alppaca,ImmobilienScout24/alppaca,ImmobilienScout24/afp-alppaca
python
## Code Before: from __future__ import print_function, absolute_import, unicode_literals, division from datetime import datetime, timedelta from textwrap import dedent from bottle import Bottle import pytz """ Super simple IMS mock. Just listens on localhost:8080 for the appropriate url, returns a test role and a dummy JSON response. """ def expiration_10s_from_now(): n = datetime.now(tz=pytz.utc) + timedelta(seconds=10) return n.strftime("%Y-%m-%dT%H:%M:%SZ") class MockIms(Bottle): PATH = '/latest/meta-data/iam/security-credentials/' json_response = dedent(""" {"Code": "Success", "AccessKeyId": "ASIAI", "SecretAccessKey": "XXYYZZ", "Token": "0123456789abcdefghijklmnopqrstuvwxyzAB", "Expiration": "%s", "Type": "AWS-HMAC"} """) def __init__(self): super(MockIms, self).__init__() self.route(self.PATH, callback=self.get_roles) self.route(self.PATH + '<role>', callback=self.get_credentials) def get_roles(self): return 'test_role' def get_credentials(self, role): return self.json_response % expiration_10s_from_now() if role == 'test_role' else '' if __name__ == "__main__": MockIms().run() ## Instruction: Move string above the imports so it becomes a docstring ## Code After: from __future__ import print_function, absolute_import, unicode_literals, division from datetime import datetime, timedelta from textwrap import dedent from bottle import Bottle import pytz def expiration_10s_from_now(): n = datetime.now(tz=pytz.utc) + timedelta(seconds=10) return n.strftime("%Y-%m-%dT%H:%M:%SZ") class MockIms(Bottle): PATH = '/latest/meta-data/iam/security-credentials/' json_response = dedent(""" {"Code": "Success", "AccessKeyId": "ASIAI", "SecretAccessKey": "XXYYZZ", "Token": "0123456789abcdefghijklmnopqrstuvwxyzAB", "Expiration": "%s", "Type": "AWS-HMAC"} """) def __init__(self): super(MockIms, self).__init__() self.route(self.PATH, callback=self.get_roles) self.route(self.PATH + '<role>', callback=self.get_credentials) def get_roles(self): return 'test_role' def get_credentials(self, role): return self.json_response % expiration_10s_from_now() if role == 'test_role' else '' if __name__ == "__main__": MockIms().run()
... from bottle import Bottle import pytz def expiration_10s_from_now(): ...
230ea3b21a8daabde9b2e0dcd93dedb5b5a87003
ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h
ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h
/* * Copyright (c) 2012 The Chromium Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #define NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #include "native_client/src/include/portability.h" #include "native_client/src/untrusted/irt/irt_ppapi.h" EXTERN_C_BEGIN // Initialize srpc connection to the browser. Some APIs like manifest file // opening do not need full ppapi initialization and so can be used after // this function returns. int IrtInit(void); // The entry point for the main thread of the PPAPI plugin process. int PpapiPluginMain(void); void PpapiPluginRegisterThreadCreator( const struct PP_ThreadFunctions* new_funcs); EXTERN_C_END #endif // NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_
/* * Copyright (c) 2012 The Chromium Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #define NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #include "native_client/src/include/portability.h" #include "native_client/src/untrusted/irt/irt_ppapi.h" EXTERN_C_BEGIN // The entry point for the main thread of the PPAPI plugin process. int PpapiPluginMain(void); void PpapiPluginRegisterThreadCreator( const struct PP_ThreadFunctions* new_funcs); EXTERN_C_END #endif // NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_
Remove declaration of IrtInit(), which is no longer defined anywhere
NaCl: Remove declaration of IrtInit(), which is no longer defined anywhere BUG=https://code.google.com/p/nativeclient/issues/detail?id=3186 TEST=build Review URL: https://codereview.chromium.org/157803004 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@250121 0039d316-1c4b-4281-b951-d872f2087c98
C
bsd-3-clause
Jonekee/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,patrickm/chromium.src,M4sse/chromium.src,M4sse/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,anirudhSK/chromium,axinging/chromium-crosswalk,patrickm/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ltilve/chromium,M4sse/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,ltilve/chromium,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,fujunwei/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,Chilledheart/chromium,ondra-novak/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,littlstar/chromium.src,jaruba/chromium.src,ltilve/chromium,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,hgl888/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,ltilve/chromium,Chilledheart/chromium,markYoungH/chromium.src,jaruba/chromium.src,anirudhSK/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,ondra-novak/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Jonekee/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,anirudhSK/chromium,Just-D/chromium-1,patrickm/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,Just-D/chromium-1,ondra-novak/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk
c
## Code Before: /* * Copyright (c) 2012 The Chromium Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #define NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #include "native_client/src/include/portability.h" #include "native_client/src/untrusted/irt/irt_ppapi.h" EXTERN_C_BEGIN // Initialize srpc connection to the browser. Some APIs like manifest file // opening do not need full ppapi initialization and so can be used after // this function returns. int IrtInit(void); // The entry point for the main thread of the PPAPI plugin process. int PpapiPluginMain(void); void PpapiPluginRegisterThreadCreator( const struct PP_ThreadFunctions* new_funcs); EXTERN_C_END #endif // NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ ## Instruction: NaCl: Remove declaration of IrtInit(), which is no longer defined anywhere BUG=https://code.google.com/p/nativeclient/issues/detail?id=3186 TEST=build Review URL: https://codereview.chromium.org/157803004 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@250121 0039d316-1c4b-4281-b951-d872f2087c98 ## Code After: /* * Copyright (c) 2012 The Chromium Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #define NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_ #include "native_client/src/include/portability.h" #include "native_client/src/untrusted/irt/irt_ppapi.h" EXTERN_C_BEGIN // The entry point for the main thread of the PPAPI plugin process. int PpapiPluginMain(void); void PpapiPluginRegisterThreadCreator( const struct PP_ThreadFunctions* new_funcs); EXTERN_C_END #endif // NATIVE_CLIENT_SRC_SHARED_PPAPI_PROXY_PPRUNTIME_H_
// ... existing code ... EXTERN_C_BEGIN // The entry point for the main thread of the PPAPI plugin process. int PpapiPluginMain(void); // ... rest of the code ...
ba21f91e055ca09dc5adb73497ab89a9d663aec3
src/main/java/com/matt/forgehax/mods/services/LocalPlayerUpdateEventService.java
src/main/java/com/matt/forgehax/mods/services/LocalPlayerUpdateEventService.java
package com.matt.forgehax.mods.services; import com.matt.forgehax.events.LocalPlayerUpdateEvent; import com.matt.forgehax.util.mod.ServiceMod; import com.matt.forgehax.util.mod.loader.RegisterMod; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.Event; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; /** Created on 6/14/2017 by fr1kin */ @RegisterMod public class LocalPlayerUpdateEventService extends ServiceMod { public LocalPlayerUpdateEventService() { super("LocalPlayerUpdateEventService"); } @SubscribeEvent public void onUpdate(LivingEvent.LivingUpdateEvent event) { if (MC.world != null && event.getEntityLiving().equals(MC.player)) { Event ev = new LocalPlayerUpdateEvent(event.getEntityLiving()); MinecraftForge.EVENT_BUS.post(ev); event.setCanceled(ev.isCanceled()); } } }
package com.matt.forgehax.mods.services; import static com.matt.forgehax.Helper.getLocalPlayer; import static com.matt.forgehax.Helper.getWorld; import com.matt.forgehax.events.LocalPlayerUpdateEvent; import com.matt.forgehax.util.mod.ServiceMod; import com.matt.forgehax.util.mod.loader.RegisterMod; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.Event; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; /** Created on 6/14/2017 by fr1kin */ @RegisterMod public class LocalPlayerUpdateEventService extends ServiceMod { public LocalPlayerUpdateEventService() { super("LocalPlayerUpdateEventService"); } @SubscribeEvent public void onUpdate(LivingEvent.LivingUpdateEvent event) { if (getWorld() != null && !getWorld().isRemote && event.getEntityLiving().equals(getLocalPlayer())) { Event ev = new LocalPlayerUpdateEvent(event.getEntityLiving()); MinecraftForge.EVENT_BUS.post(ev); event.setCanceled(ev.isCanceled()); } } }
Check if world is remote so event isn't fired for single player entity tick
Check if world is remote so event isn't fired for single player entity tick
Java
mit
fr1kin/ForgeHax,fr1kin/ForgeHax
java
## Code Before: package com.matt.forgehax.mods.services; import com.matt.forgehax.events.LocalPlayerUpdateEvent; import com.matt.forgehax.util.mod.ServiceMod; import com.matt.forgehax.util.mod.loader.RegisterMod; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.Event; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; /** Created on 6/14/2017 by fr1kin */ @RegisterMod public class LocalPlayerUpdateEventService extends ServiceMod { public LocalPlayerUpdateEventService() { super("LocalPlayerUpdateEventService"); } @SubscribeEvent public void onUpdate(LivingEvent.LivingUpdateEvent event) { if (MC.world != null && event.getEntityLiving().equals(MC.player)) { Event ev = new LocalPlayerUpdateEvent(event.getEntityLiving()); MinecraftForge.EVENT_BUS.post(ev); event.setCanceled(ev.isCanceled()); } } } ## Instruction: Check if world is remote so event isn't fired for single player entity tick ## Code After: package com.matt.forgehax.mods.services; import static com.matt.forgehax.Helper.getLocalPlayer; import static com.matt.forgehax.Helper.getWorld; import com.matt.forgehax.events.LocalPlayerUpdateEvent; import com.matt.forgehax.util.mod.ServiceMod; import com.matt.forgehax.util.mod.loader.RegisterMod; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.Event; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; /** Created on 6/14/2017 by fr1kin */ @RegisterMod public class LocalPlayerUpdateEventService extends ServiceMod { public LocalPlayerUpdateEventService() { super("LocalPlayerUpdateEventService"); } @SubscribeEvent public void onUpdate(LivingEvent.LivingUpdateEvent event) { if (getWorld() != null && !getWorld().isRemote && event.getEntityLiving().equals(getLocalPlayer())) { Event ev = new LocalPlayerUpdateEvent(event.getEntityLiving()); MinecraftForge.EVENT_BUS.post(ev); event.setCanceled(ev.isCanceled()); } } }
... package com.matt.forgehax.mods.services; import static com.matt.forgehax.Helper.getLocalPlayer; import static com.matt.forgehax.Helper.getWorld; import com.matt.forgehax.events.LocalPlayerUpdateEvent; import com.matt.forgehax.util.mod.ServiceMod; ... @SubscribeEvent public void onUpdate(LivingEvent.LivingUpdateEvent event) { if (getWorld() != null && !getWorld().isRemote && event.getEntityLiving().equals(getLocalPlayer())) { Event ev = new LocalPlayerUpdateEvent(event.getEntityLiving()); MinecraftForge.EVENT_BUS.post(ev); event.setCanceled(ev.isCanceled()); ...
c88bab708293395d89fe7aa18db557575e2927f8
samples/hello-coroutines/build.gradle.kts
samples/hello-coroutines/build.gradle.kts
import org.jetbrains.kotlin.gradle.dsl.Coroutines plugins { application id("nebula.kotlin") version "1.1.0" } application { mainClassName = "samples.HelloCoroutinesKt" } kotlin { // configure<org.jetbrains.kotlin.gradle.dsl.KotlinProjectExtension> experimental.coroutines = Coroutines.ENABLE } repositories { jcenter() }
import org.jetbrains.kotlin.gradle.dsl.Coroutines plugins { application id("nebula.kotlin") version embeddedKotlinVersion } application { mainClassName = "samples.HelloCoroutinesKt" } kotlin { // configure<org.jetbrains.kotlin.gradle.dsl.KotlinProjectExtension> experimental.coroutines = Coroutines.ENABLE } repositories { jcenter() }
Revert "Decorrelate the embedded kotlin and nebula.kotlin versions in samples"
Revert "Decorrelate the embedded kotlin and nebula.kotlin versions in samples" This reverts commit 149ee7fecb88d6fe455a73039f1b83d7d3b7ce3b.
Kotlin
apache-2.0
gradle/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle-script-kotlin,gradle/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle
kotlin
## Code Before: import org.jetbrains.kotlin.gradle.dsl.Coroutines plugins { application id("nebula.kotlin") version "1.1.0" } application { mainClassName = "samples.HelloCoroutinesKt" } kotlin { // configure<org.jetbrains.kotlin.gradle.dsl.KotlinProjectExtension> experimental.coroutines = Coroutines.ENABLE } repositories { jcenter() } ## Instruction: Revert "Decorrelate the embedded kotlin and nebula.kotlin versions in samples" This reverts commit 149ee7fecb88d6fe455a73039f1b83d7d3b7ce3b. ## Code After: import org.jetbrains.kotlin.gradle.dsl.Coroutines plugins { application id("nebula.kotlin") version embeddedKotlinVersion } application { mainClassName = "samples.HelloCoroutinesKt" } kotlin { // configure<org.jetbrains.kotlin.gradle.dsl.KotlinProjectExtension> experimental.coroutines = Coroutines.ENABLE } repositories { jcenter() }
// ... existing code ... plugins { application id("nebula.kotlin") version embeddedKotlinVersion } application { // ... rest of the code ...
938840b27fd218eeaf9c253e9162392e653dff0b
snippet_parser/it.py
snippet_parser/it.py
from __future__ import unicode_literals from core import * def handle_bandiera(template): return template.get(1) def handle_citazione(template): if template.params: return '« ' + sp(template.params[0]) + ' »' class SnippetParser(SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('bandiera'): return handle_bandiera(template) elif template.name.matches('citazione'): return handle_citazione(template) return super(SnippetParser, self).strip_template( template, normalize, collapse)
from __future__ import unicode_literals from core import * class SnippetParser(SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('bandiera'): return self.handle_bandiera(template) elif template.name.matches('citazione'): return self.handle_citazione(template) return super(SnippetParser, self).strip_template( template, normalize, collapse) def handle_bandiera(template): return template.get(1) def handle_citazione(template): if template.params: return '« ' + self.sp(template.params[0]) + ' »'
Fix snippet parser for Italian.
Fix snippet parser for Italian. Former-commit-id: bb8d10f5f8301fbcd4f4232612bf722d380a3d10
Python
mit
guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt
python
## Code Before: from __future__ import unicode_literals from core import * def handle_bandiera(template): return template.get(1) def handle_citazione(template): if template.params: return '« ' + sp(template.params[0]) + ' »' class SnippetParser(SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('bandiera'): return handle_bandiera(template) elif template.name.matches('citazione'): return handle_citazione(template) return super(SnippetParser, self).strip_template( template, normalize, collapse) ## Instruction: Fix snippet parser for Italian. Former-commit-id: bb8d10f5f8301fbcd4f4232612bf722d380a3d10 ## Code After: from __future__ import unicode_literals from core import * class SnippetParser(SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('bandiera'): return self.handle_bandiera(template) elif template.name.matches('citazione'): return self.handle_citazione(template) return super(SnippetParser, self).strip_template( template, normalize, collapse) def handle_bandiera(template): return template.get(1) def handle_citazione(template): if template.params: return '« ' + self.sp(template.params[0]) + ' »'
... from core import * class SnippetParser(SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('bandiera'): return self.handle_bandiera(template) elif template.name.matches('citazione'): return self.handle_citazione(template) return super(SnippetParser, self).strip_template( template, normalize, collapse) def handle_bandiera(template): return template.get(1) def handle_citazione(template): if template.params: return '« ' + self.sp(template.params[0]) + ' »' ...
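A short usage sketch for the record above: the handlers operate on mwparserfromhell-style Template objects, so the dispatch in strip_template can be exercised directly. The wikitext and parameter values below are invented for illustration and are not taken from the repo.

import mwparserfromhell

wikicode = mwparserfromhell.parse('{{bandiera|ITA}} {{citazione|Eppur si muove}}')
for template in wikicode.filter_templates():
    if template.name.matches('bandiera'):
        print(template.get(1))        # flag parameter: ITA
    elif template.name.matches('citazione') and template.params:
        print(template.params[0])     # quoted text: Eppur si muove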
d740326c4a5f4f930a4555cd9dd01fe1b3280997
config-model/src/main/java/com/yahoo/vespa/model/admin/monitoring/AutoscalingMetrics.java
config-model/src/main/java/com/yahoo/vespa/model/admin/monitoring/AutoscalingMetrics.java
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.model.admin.monitoring; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; /** * Metrics used for autoscaling * * @author bratseth */ public class AutoscalingMetrics { public static final MetricSet autoscalingMetricSet = create(); private static MetricSet create() { return new MetricSet("autoscaling", metrics("cpu.util", "mem.util", "disk.util", "application_generation", "in_service")); } private static Set<Metric> metrics(String ... metrics) { return Arrays.stream(metrics).map(Metric::new).collect(Collectors.toSet()); } }
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.model.admin.monitoring; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; /** * Metrics used for autoscaling * * @author bratseth */ public class AutoscalingMetrics { public static final MetricSet autoscalingMetricSet = create(); private static MetricSet create() { return new MetricSet("autoscaling", metrics("cpu.util", "mem.util", "disk.util", "application_generation", "in_service", "queries.rate")); } private static Set<Metric> metrics(String ... metrics) { return Arrays.stream(metrics).map(Metric::new).collect(Collectors.toSet()); } }
Add queries.rate to the 'autoscaling' consumer.
Add queries.rate to the 'autoscaling' consumer.
Java
apache-2.0
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
java
## Code Before: // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.model.admin.monitoring; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; /** * Metrics used for autoscaling * * @author bratseth */ public class AutoscalingMetrics { public static final MetricSet autoscalingMetricSet = create(); private static MetricSet create() { return new MetricSet("autoscaling", metrics("cpu.util", "mem.util", "disk.util", "application_generation", "in_service")); } private static Set<Metric> metrics(String ... metrics) { return Arrays.stream(metrics).map(Metric::new).collect(Collectors.toSet()); } } ## Instruction: Add queries.rate to the 'autoscaling' consumer. ## Code After: // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.model.admin.monitoring; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; /** * Metrics used for autoscaling * * @author bratseth */ public class AutoscalingMetrics { public static final MetricSet autoscalingMetricSet = create(); private static MetricSet create() { return new MetricSet("autoscaling", metrics("cpu.util", "mem.util", "disk.util", "application_generation", "in_service", "queries.rate")); } private static Set<Metric> metrics(String ... metrics) { return Arrays.stream(metrics).map(Metric::new).collect(Collectors.toSet()); } }
// ... existing code ... "mem.util", "disk.util", "application_generation", "in_service", "queries.rate")); } private static Set<Metric> metrics(String ... metrics) { // ... rest of the code ...
c82574aec4ee413198f54473cb47508a6b271f9a
dmf_device_ui/client.py
dmf_device_ui/client.py
import sys import zmq import time def main(): port = 5000 if len(sys.argv) > 1: port = sys.argv[1] int(port) bind_addr = "tcp://localhost:%s" % port context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect(bind_addr) socket.setsockopt(zmq.SUBSCRIBE,'') print "Listening for events on %s ..." % bind_addr while True: mssg = socket.recv() print mssg if __name__ == '__main__': main()
import sys import zmq import time def main(): port = 5000 if len(sys.argv) > 1: port = sys.argv[1] int(port) bind_addr = "tcp://localhost:%s" % port context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect(bind_addr) socket.setsockopt(zmq.SUBSCRIBE,'') print "Listening for events on %s ..." % bind_addr while True: try: try: mssg = socket.recv(zmq.NOBLOCK) print mssg except zmq.error.Again: time.sleep(0.001) except KeyboardInterrupt: break if __name__ == '__main__': main()
Replace spaces with tabs, quit on <Ctrl+C>
Replace spaces with tabs, quit on <Ctrl+C>
Python
lgpl-2.1
wheeler-microfluidics/dmf-device-ui
python
## Code Before: import sys import zmq import time def main(): port = 5000 if len(sys.argv) > 1: port = sys.argv[1] int(port) bind_addr = "tcp://localhost:%s" % port context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect(bind_addr) socket.setsockopt(zmq.SUBSCRIBE,'') print "Listening for events on %s ..." % bind_addr while True: mssg = socket.recv() print mssg if __name__ == '__main__': main() ## Instruction: Replace spaces with tabs, quit on <Ctrl+C> ## Code After: import sys import zmq import time def main(): port = 5000 if len(sys.argv) > 1: port = sys.argv[1] int(port) bind_addr = "tcp://localhost:%s" % port context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect(bind_addr) socket.setsockopt(zmq.SUBSCRIBE,'') print "Listening for events on %s ..." % bind_addr while True: try: try: mssg = socket.recv(zmq.NOBLOCK) print mssg except zmq.error.Again: time.sleep(0.001) except KeyboardInterrupt: break if __name__ == '__main__': main()
... def main(): port = 5000 if len(sys.argv) > 1: port = sys.argv[1] int(port) bind_addr = "tcp://localhost:%s" % port context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect(bind_addr) socket.setsockopt(zmq.SUBSCRIBE,'') print "Listening for events on %s ..." % bind_addr while True: try: try: mssg = socket.recv(zmq.NOBLOCK) print mssg except zmq.error.Again: time.sleep(0.001) except KeyboardInterrupt: break if __name__ == '__main__': main() ...
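A minimal alternative sketch for the record above, assuming the same SUB socket object: pyzmq's Socket.poll() blocks with a timeout, which gives the same quit-on-Ctrl+C behaviour without the sleep between non-blocking recv() calls.

try:
    while True:
        if socket.poll(timeout=100):  # milliseconds; returns 0 if nothing arrived
            print socket.recv()
except KeyboardInterrupt:
    pass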
c539b7d9bb3e40f7ac69d44771f56476c953629d
Pod/Classes/Foundation/runtime/NSObject+ASPropertyAttributes.h
Pod/Classes/Foundation/runtime/NSObject+ASPropertyAttributes.h
// // NSObject+ASPropertyAttributes.h // AppScaffold Cocoa Category // // Created by Whirlwind on 15/4/3. // Copyright (c) 2015年 AppScaffold. All rights reserved. // #import <Foundation/Foundation.h> #if __has_include("EXTRuntimeExtensions.h") // This category need pod 'libextobjc' #import "EXTRuntimeExtensions.h" @interface NSObject (PropertyAttributes) + (ext_propertyAttributes *)copyPropertyAttributesByName:(NSString *)name; @end #endif
// // NSObject+ASPropertyAttributes.h // AppScaffold Cocoa Category // // Created by Whirlwind on 15/4/3. // Copyright (c) 2015年 AppScaffold. All rights reserved. // #import <Foundation/Foundation.h> #if __has_include(<libextobjc/EXTRuntimeExtensions.h>) // This category need pod 'libextobjc' #import <libextobjc/EXTRuntimeExtensions.h> @interface NSObject (PropertyAttributes) + (ext_propertyAttributes *)copyPropertyAttributesByName:(NSString *)name; @end #endif
Fix check the libextobjc when it is framework
Fix check the libextobjc when it is framework
C
mit
AppScaffold/ASCocoaCategory,AppScaffold/ASCocoaCategory,Whirlwind/ASCocoaCategory,Whirlwind/ASCocoaCategory,Whirlwind/ASCocoaCategory,AppScaffold/ASCocoaCategory
c
## Code Before: // // NSObject+ASPropertyAttributes.h // AppScaffold Cocoa Category // // Created by Whirlwind on 15/4/3. // Copyright (c) 2015年 AppScaffold. All rights reserved. // #import <Foundation/Foundation.h> #if __has_include("EXTRuntimeExtensions.h") // This category need pod 'libextobjc' #import "EXTRuntimeExtensions.h" @interface NSObject (PropertyAttributes) + (ext_propertyAttributes *)copyPropertyAttributesByName:(NSString *)name; @end #endif ## Instruction: Fix check the libextobjc when it is framework ## Code After: // // NSObject+ASPropertyAttributes.h // AppScaffold Cocoa Category // // Created by Whirlwind on 15/4/3. // Copyright (c) 2015年 AppScaffold. All rights reserved. // #import <Foundation/Foundation.h> #if __has_include(<libextobjc/EXTRuntimeExtensions.h>) // This category need pod 'libextobjc' #import <libextobjc/EXTRuntimeExtensions.h> @interface NSObject (PropertyAttributes) + (ext_propertyAttributes *)copyPropertyAttributesByName:(NSString *)name; @end #endif
// ... existing code ... #import <Foundation/Foundation.h> #if __has_include(<libextobjc/EXTRuntimeExtensions.h>) // This category need pod 'libextobjc' #import <libextobjc/EXTRuntimeExtensions.h> @interface NSObject (PropertyAttributes) // ... rest of the code ...
22eda7c2b844c9dccb31ad9cce882cc13d1adf75
apel_rest/urls.py
apel_rest/urls.py
"""This file maps url patterns to classes.""" from django.conf.urls import patterns, include, url from django.contrib import admin from api.views.CloudRecordSummaryView import CloudRecordSummaryView from api.views.CloudRecordView import CloudRecordView admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'apel_rest.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^api/v1/cloud/record$', CloudRecordView.as_view()), url(r'^api/v1/cloud/record/summary$', CloudRecordSummaryView.as_view()))
"""This file maps url patterns to classes.""" from django.conf.urls import patterns, include, url from django.contrib import admin from api.views.CloudRecordSummaryView import CloudRecordSummaryView from api.views.CloudRecordView import CloudRecordView admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'apel_rest.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls,)), url(r'^api/v1/cloud/record$', CloudRecordView.as_view(), name='CloudRecordView'), url(r'^api/v1/cloud/record/summary$', CloudRecordSummaryView.as_view(), name='CloudRecordSummaryView'))
Add name to patterns in urlpatterns
Add name to patterns in urlpatterns - so tests can use reverse()
Python
apache-2.0
apel/rest,apel/rest
python
## Code Before: """This file maps url patterns to classes.""" from django.conf.urls import patterns, include, url from django.contrib import admin from api.views.CloudRecordSummaryView import CloudRecordSummaryView from api.views.CloudRecordView import CloudRecordView admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'apel_rest.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^api/v1/cloud/record$', CloudRecordView.as_view()), url(r'^api/v1/cloud/record/summary$', CloudRecordSummaryView.as_view())) ## Instruction: Add name to patterns in urlpatterns - so tests can use reverse() ## Code After: """This file maps url patterns to classes.""" from django.conf.urls import patterns, include, url from django.contrib import admin from api.views.CloudRecordSummaryView import CloudRecordSummaryView from api.views.CloudRecordView import CloudRecordView admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'apel_rest.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls,)), url(r'^api/v1/cloud/record$', CloudRecordView.as_view(), name='CloudRecordView'), url(r'^api/v1/cloud/record/summary$', CloudRecordSummaryView.as_view(), name='CloudRecordSummaryView'))
# ... existing code ... # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls,)), url(r'^api/v1/cloud/record$', CloudRecordView.as_view(), name='CloudRecordView'), url(r'^api/v1/cloud/record/summary$', CloudRecordSummaryView.as_view(), name='CloudRecordSummaryView')) # ... rest of the code ...
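The point of the name= arguments in the record above is URL reversal in tests; a minimal sketch, assuming a django.test.TestCase method (the import path matches the old-style patterns() used here; newer Django exposes it as django.urls.reverse).

from django.core.urlresolvers import reverse

url = reverse('CloudRecordSummaryView')   # -> '/api/v1/cloud/record/summary'
response = self.client.get(url)           # self.client comes from TestCase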
f4f5852944d1fd1b9e96a70cb4496ee6e1e66dc0
genome_designer/main/celery_util.py
genome_designer/main/celery_util.py
from errno import errorcode from celery.task.control import inspect CELERY_ERROR_KEY = 'ERROR' def get_celery_worker_status(): """Checks whether celery is running and reports the error if not. Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running """ try: insp = inspect() d = insp.stats() if not d: d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' } except IOError as e: msg = "Error connecting to the backend: " + str(e) if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED': msg += ' Check that the RabbitMQ server is running.' d = { CELERY_ERROR_KEY: msg } except ImportError as e: d = { CELERY_ERROR_KEY: str(e)} return d
from errno import errorcode from celery.task.control import inspect from django.conf import settings CELERY_ERROR_KEY = 'ERROR' def get_celery_worker_status(): """Checks whether celery is running and reports the error if not. Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running """ if settings.BROKER_BACKEND == 'memory': # We are testing with in-memory celery. Celery is effectively running. return {} try: insp = inspect() d = insp.stats() if not d: d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' } except IOError as e: msg = "Error connecting to the backend: " + str(e) if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED': msg += ' Check that the RabbitMQ server is running.' d = { CELERY_ERROR_KEY: msg } except ImportError as e: d = { CELERY_ERROR_KEY: str(e)} return d
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
Python
mit
churchlab/millstone,churchlab/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone
python
## Code Before: from errno import errorcode from celery.task.control import inspect CELERY_ERROR_KEY = 'ERROR' def get_celery_worker_status(): """Checks whether celery is running and reports the error if not. Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running """ try: insp = inspect() d = insp.stats() if not d: d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' } except IOError as e: msg = "Error connecting to the backend: " + str(e) if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED': msg += ' Check that the RabbitMQ server is running.' d = { CELERY_ERROR_KEY: msg } except ImportError as e: d = { CELERY_ERROR_KEY: str(e)} return d ## Instruction: Fix tests: Allow for celery not to be running when doing in-memory celery for tests. ## Code After: from errno import errorcode from celery.task.control import inspect from django.conf import settings CELERY_ERROR_KEY = 'ERROR' def get_celery_worker_status(): """Checks whether celery is running and reports the error if not. Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running """ if settings.BROKER_BACKEND == 'memory': # We are testing with in-memory celery. Celery is effectively running. return {} try: insp = inspect() d = insp.stats() if not d: d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' } except IOError as e: msg = "Error connecting to the backend: " + str(e) if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED': msg += ' Check that the RabbitMQ server is running.' d = { CELERY_ERROR_KEY: msg } except ImportError as e: d = { CELERY_ERROR_KEY: str(e)} return d
// ... existing code ... from errno import errorcode from celery.task.control import inspect from django.conf import settings CELERY_ERROR_KEY = 'ERROR' // ... modified code ... Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running """ if settings.BROKER_BACKEND == 'memory': # We are testing with in-memory celery. Celery is effectively running. return {} try: insp = inspect() d = insp.stats() // ... rest of the code ...
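The BROKER_BACKEND == 'memory' check in the record above corresponds to Celery's in-memory test transport; a sketch of the kind of test settings it assumes (these are standard old-style Celery/Django setting names, not taken from this repo).

BROKER_BACKEND = 'memory'
CELERY_ALWAYS_EAGER = True                 # run tasks synchronously in-process
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True  # surface task errors in the test run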
69ed3b02102fb019947532c45951282511517525
android/src/main/java/com/facebook/flipper/plugins/databases/impl/DefaultSqliteDatabaseProvider.java
android/src/main/java/com/facebook/flipper/plugins/databases/impl/DefaultSqliteDatabaseProvider.java
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.flipper.plugins.databases.impl; import android.content.Context; import java.io.File; import java.util.ArrayList; import java.util.List; public class DefaultSqliteDatabaseProvider implements SqliteDatabaseProvider { private Context context; public DefaultSqliteDatabaseProvider(Context context) { this.context = context; } @Override public List<File> getDatabaseFiles() { List<File> databaseFiles = new ArrayList<>(); for (String databaseName : context.databaseList()) { databaseFiles.add(context.getDatabasePath(databaseName)); } return databaseFiles; } }
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.flipper.plugins.databases.impl; import android.content.Context; import java.io.File; import java.util.ArrayList; import java.util.List; public class DefaultSqliteDatabaseProvider implements SqliteDatabaseProvider { private static final int MAX_RECURSIVE_TRAVERSAL_DEPTH = 5; private static final String DB_EXTENSION = ".db"; private final int fileDirectoryRecursiveDepth; private final Context context; public DefaultSqliteDatabaseProvider(Context context) { this(context, MAX_RECURSIVE_TRAVERSAL_DEPTH); } public DefaultSqliteDatabaseProvider(Context context, int fileDirectoryRecursiveDepth) { this.context = context; this.fileDirectoryRecursiveDepth = fileDirectoryRecursiveDepth; } @Override public List<File> getDatabaseFiles() { List<File> databaseFiles = new ArrayList<>(); for (String databaseName : context.databaseList()) { databaseFiles.add(context.getDatabasePath(databaseName)); } addDatabaseFilesRecursively( new File(context.getFilesDir().getPath()), 0, DB_EXTENSION, fileDirectoryRecursiveDepth, databaseFiles); return databaseFiles; } private static void addDatabaseFilesRecursively( File directory, int depth, String dbExtension, int maxDepth, List<File> dbFiles) { if (depth >= maxDepth) { return; } File[] files = directory.listFiles(); if (files != null) { for (File f : files) { if (f.isFile() && f.getPath().endsWith(dbExtension)) { dbFiles.add(f); } else if (f.isDirectory()) { addDatabaseFilesRecursively(f, depth + 1, dbExtension, maxDepth, dbFiles); } } } } }
Add databases under /files to the list of databases visible by default in Flipper Database Plugin
Add databases under /files to the list of databases visible by default in Flipper Database Plugin Summary: Traverse recursively the /files directory and show all databases with .db extension in Flipper. Traversal depth is limited to 5 by default for performance reasons. Reviewed By: mweststrate Differential Revision: D28451609 fbshipit-source-id: de27c855fee220e0b79061c9b2df1eba6f5ef2af
Java
mit
facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper
java
## Code Before: /* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.flipper.plugins.databases.impl; import android.content.Context; import java.io.File; import java.util.ArrayList; import java.util.List; public class DefaultSqliteDatabaseProvider implements SqliteDatabaseProvider { private Context context; public DefaultSqliteDatabaseProvider(Context context) { this.context = context; } @Override public List<File> getDatabaseFiles() { List<File> databaseFiles = new ArrayList<>(); for (String databaseName : context.databaseList()) { databaseFiles.add(context.getDatabasePath(databaseName)); } return databaseFiles; } } ## Instruction: Add databases under /files to the list of databases visible by default in Flipper Database Plugin Summary: Traverse recursively the /files directory and show all databases with .db extension in Flipper. Traversal depth is limited to 5 by default for performance reasons. Reviewed By: mweststrate Differential Revision: D28451609 fbshipit-source-id: de27c855fee220e0b79061c9b2df1eba6f5ef2af ## Code After: /* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.flipper.plugins.databases.impl; import android.content.Context; import java.io.File; import java.util.ArrayList; import java.util.List; public class DefaultSqliteDatabaseProvider implements SqliteDatabaseProvider { private static final int MAX_RECURSIVE_TRAVERSAL_DEPTH = 5; private static final String DB_EXTENSION = ".db"; private final int fileDirectoryRecursiveDepth; private final Context context; public DefaultSqliteDatabaseProvider(Context context) { this(context, MAX_RECURSIVE_TRAVERSAL_DEPTH); } public DefaultSqliteDatabaseProvider(Context context, int fileDirectoryRecursiveDepth) { this.context = context; this.fileDirectoryRecursiveDepth = fileDirectoryRecursiveDepth; } @Override public List<File> getDatabaseFiles() { List<File> databaseFiles = new ArrayList<>(); for (String databaseName : context.databaseList()) { databaseFiles.add(context.getDatabasePath(databaseName)); } addDatabaseFilesRecursively( new File(context.getFilesDir().getPath()), 0, DB_EXTENSION, fileDirectoryRecursiveDepth, databaseFiles); return databaseFiles; } private static void addDatabaseFilesRecursively( File directory, int depth, String dbExtension, int maxDepth, List<File> dbFiles) { if (depth >= maxDepth) { return; } File[] files = directory.listFiles(); if (files != null) { for (File f : files) { if (f.isFile() && f.getPath().endsWith(dbExtension)) { dbFiles.add(f); } else if (f.isDirectory()) { addDatabaseFilesRecursively(f, depth + 1, dbExtension, maxDepth, dbFiles); } } } } }
// ... existing code ... import java.util.List; public class DefaultSqliteDatabaseProvider implements SqliteDatabaseProvider { private static final int MAX_RECURSIVE_TRAVERSAL_DEPTH = 5; private static final String DB_EXTENSION = ".db"; private final int fileDirectoryRecursiveDepth; private final Context context; public DefaultSqliteDatabaseProvider(Context context) { this(context, MAX_RECURSIVE_TRAVERSAL_DEPTH); } public DefaultSqliteDatabaseProvider(Context context, int fileDirectoryRecursiveDepth) { this.context = context; this.fileDirectoryRecursiveDepth = fileDirectoryRecursiveDepth; } @Override // ... modified code ... for (String databaseName : context.databaseList()) { databaseFiles.add(context.getDatabasePath(databaseName)); } addDatabaseFilesRecursively( new File(context.getFilesDir().getPath()), 0, DB_EXTENSION, fileDirectoryRecursiveDepth, databaseFiles); return databaseFiles; } private static void addDatabaseFilesRecursively( File directory, int depth, String dbExtension, int maxDepth, List<File> dbFiles) { if (depth >= maxDepth) { return; } File[] files = directory.listFiles(); if (files != null) { for (File f : files) { if (f.isFile() && f.getPath().endsWith(dbExtension)) { dbFiles.add(f); } else if (f.isDirectory()) { addDatabaseFilesRecursively(f, depth + 1, dbExtension, maxDepth, dbFiles); } } } } } // ... rest of the code ...
be517c8df23826d343b187a4a5cc3d1f81a06b53
test/framework/utils.py
test/framework/utils.py
import os, re from os.path import join as jp from .config import flow_graph_root_dir _http_re = re.compile(r'https?://[^/]*/') def replace_host_port(contains_url): return _http_re.sub('http://x.x/', contains_url) def flow_graph_dir(flow_name): """ Put the generated graph in the workspace root if running from Jenkins If running from commandline put it under config.flow_graph_root_dir/flow_name return: dir-name """ return '.' if os.environ.get('JOB_NAME') else jp(flow_graph_root_dir, flow_name)
import os, re from os.path import join as jp from .config import flow_graph_root_dir _http_re = re.compile(r'https?://.*?/job/') def replace_host_port(contains_url): return _http_re.sub('http://x.x/job/', contains_url) def flow_graph_dir(flow_name): """ Put the generated graph in the workspace root if running from Jenkins If running from commandline put it under config.flow_graph_root_dir/flow_name return: dir-name """ return '.' if os.environ.get('JOB_NAME') else jp(flow_graph_root_dir, flow_name)
Test framework fix - url replacing handles jenkins url with 'prefix'
Test framework fix - url replacing handles jenkins url with 'prefix'
Python
bsd-3-clause
lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow
python
## Code Before: import os, re from os.path import join as jp from .config import flow_graph_root_dir _http_re = re.compile(r'https?://[^/]*/') def replace_host_port(contains_url): return _http_re.sub('http://x.x/', contains_url) def flow_graph_dir(flow_name): """ Put the generated graph in the workspace root if running from Jenkins If running from commandline put it under config.flow_graph_root_dir/flow_name return: dir-name """ return '.' if os.environ.get('JOB_NAME') else jp(flow_graph_root_dir, flow_name) ## Instruction: Test framework fix - url replacing handles jenkins url with 'prefix' ## Code After: import os, re from os.path import join as jp from .config import flow_graph_root_dir _http_re = re.compile(r'https?://.*?/job/') def replace_host_port(contains_url): return _http_re.sub('http://x.x/job/', contains_url) def flow_graph_dir(flow_name): """ Put the generated graph in the workspace root if running from Jenkins If running from commandline put it under config.flow_graph_root_dir/flow_name return: dir-name """ return '.' if os.environ.get('JOB_NAME') else jp(flow_graph_root_dir, flow_name)
// ... existing code ... from .config import flow_graph_root_dir _http_re = re.compile(r'https?://.*?/job/') def replace_host_port(contains_url): return _http_re.sub('http://x.x/job/', contains_url) def flow_graph_dir(flow_name): // ... rest of the code ...
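Illustrative behaviour of the widened pattern in the record above (the URL is invented): on a Jenkins instance served under a path prefix, the old expression only stripped the host, while the new one also strips the prefix up to '/job/'.

replace_host_port('http://ci.example.com:8080/jenkins/job/quick/1/console')
# old pattern r'https?://[^/]*/'   -> 'http://x.x/jenkins/job/quick/1/console'
# new pattern r'https?://.*?/job/' -> 'http://x.x/job/quick/1/console'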
cf6034fc62cc97a5655b371fdef4a4728707fdea
changes/utils/locking.py
changes/utils/locking.py
from flask import current_app from functools import wraps from hashlib import md5 from changes.ext.redis import UnableToGetLock from changes.config import redis def lock(func): @wraps(func) def wrapped(**kwargs): key = '{0}:{1}'.format( func.__name__, md5( '&'.join('{0}={1}'.format(k, repr(v)) for k, v in sorted(kwargs.iteritems())) ).hexdigest() ) try: with redis.lock(key, timeout=1, expire=300, nowait=True): return func(**kwargs) except UnableToGetLock: current_app.logger.warn('Unable to get lock for %s', key) return wrapped
from flask import current_app from functools import wraps from hashlib import md5 from changes.ext.redis import UnableToGetLock from changes.config import redis def lock(func): @wraps(func) def wrapped(**kwargs): key = '{0}:{1}:{2}'.format( func.__module__, func.__name__, md5( '&'.join('{0}={1}'.format(k, repr(v)) for k, v in sorted(kwargs.iteritems())) ).hexdigest() ) try: with redis.lock(key, timeout=1, expire=300, nowait=True): return func(**kwargs) except UnableToGetLock: current_app.logger.warn('Unable to get lock for %s', key) return wrapped
Use __module__ to make @lock unique
Use __module__ to make @lock unique Summary: Fixes T49428. Test Plan: Hard to test on changes_dev because it can't run both handlers (no place to send notifications to), but this seems simple enough... Reviewers: armooo, kylec Reviewed By: kylec Subscribers: changesbot, mkedia, jukka, vishal Maniphest Tasks: T49428 Differential Revision: https://tails.corp.dropbox.com/D122408
Python
apache-2.0
bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes
python
## Code Before: from flask import current_app from functools import wraps from hashlib import md5 from changes.ext.redis import UnableToGetLock from changes.config import redis def lock(func): @wraps(func) def wrapped(**kwargs): key = '{0}:{1}'.format( func.__name__, md5( '&'.join('{0}={1}'.format(k, repr(v)) for k, v in sorted(kwargs.iteritems())) ).hexdigest() ) try: with redis.lock(key, timeout=1, expire=300, nowait=True): return func(**kwargs) except UnableToGetLock: current_app.logger.warn('Unable to get lock for %s', key) return wrapped ## Instruction: Use __module__ to make @lock unique Summary: Fixes T49428. Test Plan: Hard to test on changes_dev because it can't run both handlers (no place to send notifications to), but this seems simple enough... Reviewers: armooo, kylec Reviewed By: kylec Subscribers: changesbot, mkedia, jukka, vishal Maniphest Tasks: T49428 Differential Revision: https://tails.corp.dropbox.com/D122408 ## Code After: from flask import current_app from functools import wraps from hashlib import md5 from changes.ext.redis import UnableToGetLock from changes.config import redis def lock(func): @wraps(func) def wrapped(**kwargs): key = '{0}:{1}:{2}'.format( func.__module__, func.__name__, md5( '&'.join('{0}={1}'.format(k, repr(v)) for k, v in sorted(kwargs.iteritems())) ).hexdigest() ) try: with redis.lock(key, timeout=1, expire=300, nowait=True): return func(**kwargs) except UnableToGetLock: current_app.logger.warn('Unable to get lock for %s', key) return wrapped
// ... existing code ... def lock(func): @wraps(func) def wrapped(**kwargs): key = '{0}:{1}:{2}'.format( func.__module__, func.__name__, md5( '&'.join('{0}={1}'.format(k, repr(v)) // ... rest of the code ...
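Why __module__ is added in the record above, shown with invented module names: two same-named task functions called with the same (here empty) kwargs used to hash to a single Redis key and therefore shared one lock.

# before: any function named 'sync' with empty kwargs produced
#   'sync:d41d8cd98f00b204e9800998ecf8427e'            (md5 of the empty kwargs string)
# after: the module keeps the keys distinct
#   'changes.jobs.sync_build:sync:d41d8cd98f00b204e9800998ecf8427e'
#   'changes.jobs.sync_artifact:sync:d41d8cd98f00b204e9800998ecf8427e'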
e118ee78b534a83b33f91b27cfc1f75d64e8e924
test_utils/testmaker/base_serializer.py
test_utils/testmaker/base_serializer.py
import cPickle as pickle import logging import time ser = logging.getLogger('testserializer') class Serializer(object): """A pluggable Serializer class""" name = "default" def __init__(self, name='default'): """Constructor""" self.data = {} self.name = name def save_request(self, request): """Saves the Request to the serialization stream""" request_dict = { 'name': self.name, 'time': time.time(), 'path': request.path, 'get': request.GET, 'post': request.POST, 'arg_dict': request.REQUEST, } ser.info(pickle.dumps(request_dict)) ser.info('---REQUEST_BREAK---') def save_response(self, path, response): """Saves the Response-like objects information that might be tested""" response_dict = { 'name': self.name, 'time': time.time(), 'path': path, 'context': response.context, 'content': response.content, 'status_code': response.status_code, 'cookies': response.cookies, 'headers': response._headers, } try: ser.info(pickle.dumps(response_dict)) ser.info('---RESPONSE_BREAK---') except (TypeError, pickle.PicklingError): #Can't pickle wsgi.error objects pass
import cPickle as pickle import logging import time class Serializer(object): """A pluggable Serializer class""" name = "default" def __init__(self, name='default'): """Constructor""" self.ser = logging.getLogger('testserializer') self.data = {} self.name = name def process_request(self, request): request_dict = { 'name': self.name, 'time': time.time(), 'path': request.path, 'get': request.GET, 'post': request.POST, 'arg_dict': request.REQUEST, 'method': request.method, } return request_dict def save_request(self, request): """Saves the Request to the serialization stream""" request_dict = self.process_request(request) self.ser.info(pickle.dumps(request_dict)) self.ser.info('---REQUEST_BREAK---') def process_response(self, path, response): response_dict = { 'name': self.name, 'time': time.time(), 'path': path, 'context': response.context, 'content': response.content, 'status_code': response.status_code, 'cookies': response.cookies, 'headers': response._headers, } return response_dict def save_response(self, path, response): """Saves the Response-like objects information that might be tested""" response_dict = self.process_response(path, response) try: self.ser.info(pickle.dumps(response_dict)) self.ser.info('---RESPONSE_BREAK---') except (TypeError, pickle.PicklingError): #Can't pickle wsgi.error objects pass
Move serializer into the class so it can be subclassed.
Move serializer into the class so it can be subclassed.
Python
mit
frac/django-test-utils,acdha/django-test-utils,ericholscher/django-test-utils,frac/django-test-utils,ericholscher/django-test-utils,acdha/django-test-utils
python
## Code Before: import cPickle as pickle import logging import time ser = logging.getLogger('testserializer') class Serializer(object): """A pluggable Serializer class""" name = "default" def __init__(self, name='default'): """Constructor""" self.data = {} self.name = name def save_request(self, request): """Saves the Request to the serialization stream""" request_dict = { 'name': self.name, 'time': time.time(), 'path': request.path, 'get': request.GET, 'post': request.POST, 'arg_dict': request.REQUEST, } ser.info(pickle.dumps(request_dict)) ser.info('---REQUEST_BREAK---') def save_response(self, path, response): """Saves the Response-like objects information that might be tested""" response_dict = { 'name': self.name, 'time': time.time(), 'path': path, 'context': response.context, 'content': response.content, 'status_code': response.status_code, 'cookies': response.cookies, 'headers': response._headers, } try: ser.info(pickle.dumps(response_dict)) ser.info('---RESPONSE_BREAK---') except (TypeError, pickle.PicklingError): #Can't pickle wsgi.error objects pass ## Instruction: Move serializer into the class so it can be subclassed. ## Code After: import cPickle as pickle import logging import time class Serializer(object): """A pluggable Serializer class""" name = "default" def __init__(self, name='default'): """Constructor""" self.ser = logging.getLogger('testserializer') self.data = {} self.name = name def process_request(self, request): request_dict = { 'name': self.name, 'time': time.time(), 'path': request.path, 'get': request.GET, 'post': request.POST, 'arg_dict': request.REQUEST, 'method': request.method, } return request_dict def save_request(self, request): """Saves the Request to the serialization stream""" request_dict = self.process_request(request) self.ser.info(pickle.dumps(request_dict)) self.ser.info('---REQUEST_BREAK---') def process_response(self, path, response): response_dict = { 'name': self.name, 'time': time.time(), 'path': path, 'context': response.context, 'content': response.content, 'status_code': response.status_code, 'cookies': response.cookies, 'headers': response._headers, } return response_dict def save_response(self, path, response): """Saves the Response-like objects information that might be tested""" response_dict = self.process_response(path, response) try: self.ser.info(pickle.dumps(response_dict)) self.ser.info('---RESPONSE_BREAK---') except (TypeError, pickle.PicklingError): #Can't pickle wsgi.error objects pass
... import logging import time class Serializer(object): """A pluggable Serializer class""" ... def __init__(self, name='default'): """Constructor""" self.ser = logging.getLogger('testserializer') self.data = {} self.name = name def process_request(self, request): request_dict = { 'name': self.name, 'time': time.time(), 'path': request.path, 'get': request.GET, 'post': request.POST, 'arg_dict': request.REQUEST, 'method': request.method, } return request_dict def save_request(self, request): """Saves the Request to the serialization stream""" request_dict = self.process_request(request) self.ser.info(pickle.dumps(request_dict)) self.ser.info('---REQUEST_BREAK---') def process_response(self, path, response): response_dict = { 'name': self.name, 'time': time.time(), ... 'cookies': response.cookies, 'headers': response._headers, } return response_dict def save_response(self, path, response): """Saves the Response-like objects information that might be tested""" response_dict = self.process_response(path, response) try: self.ser.info(pickle.dumps(response_dict)) self.ser.info('---RESPONSE_BREAK---') except (TypeError, pickle.PicklingError): #Can't pickle wsgi.error objects pass ...
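A sketch of the kind of subclass the refactor in the record above enables; the extra field is invented, while the hook name comes from the record itself.

class HeaderAwareSerializer(Serializer):
    def process_request(self, request):
        # reuse the base dict, then attach one extra piece of request data
        data = super(HeaderAwareSerializer, self).process_request(request)
        data['user_agent'] = request.META.get('HTTP_USER_AGENT', '')
        return data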
1d80b38e935401e40bdca2f3faaec4aeba77a716
src/random.h
src/random.h
/** * @file random.h * @ingroup CppAlgorithms * @brief Random utility functions. * * Copyright (c) 2013 Sebastien Rombauts ([email protected]) * * Distributed under the MIT License (MIT) (See accompanying file LICENSE.txt * or copy at http://opensource.org/licenses/MIT) */ /** * @brief Random utility functions. */ class Random { public: /** * @brief Generate a printable alphanumeric character. */ static char GenChar(); /** * @brief Generate a printable alphanumeric string. */ static void GenString(char* str, size_t len); };
/** * @file random.h * @ingroup CppAlgorithms * @brief Random utility functions. * * Copyright (c) 2013 Sebastien Rombauts ([email protected]) * * Distributed under the MIT License (MIT) (See accompanying file LICENSE.txt * or copy at http://opensource.org/licenses/MIT) */ #include <cstddef> // size_t /** * @brief Random utility functions. */ class Random { public: /** * @brief Generate a printable alphanumeric character. */ static char GenChar(); /** * @brief Generate a printable alphanumeric string. */ static void GenString(char* str, size_t len); };
Fix Travis build (missing include for size_t)
Fix Travis build (missing include for size_t)
C
mit
SRombauts/cpp-algorithms,SRombauts/cpp-algorithms
c
## Code Before: /** * @file random.h * @ingroup CppAlgorithms * @brief Random utility functions. * * Copyright (c) 2013 Sebastien Rombauts ([email protected]) * * Distributed under the MIT License (MIT) (See accompanying file LICENSE.txt * or copy at http://opensource.org/licenses/MIT) */ /** * @brief Random utility functions. */ class Random { public: /** * @brief Generate a printable alphanumeric character. */ static char GenChar(); /** * @brief Generate a printable alphanumeric string. */ static void GenString(char* str, size_t len); }; ## Instruction: Fix Travis build (missing include for size_t) ## Code After: /** * @file random.h * @ingroup CppAlgorithms * @brief Random utility functions. * * Copyright (c) 2013 Sebastien Rombauts ([email protected]) * * Distributed under the MIT License (MIT) (See accompanying file LICENSE.txt * or copy at http://opensource.org/licenses/MIT) */ #include <cstddef> // size_t /** * @brief Random utility functions. */ class Random { public: /** * @brief Generate a printable alphanumeric character. */ static char GenChar(); /** * @brief Generate a printable alphanumeric string. */ static void GenString(char* str, size_t len); };
# ... existing code ... * Distributed under the MIT License (MIT) (See accompanying file LICENSE.txt * or copy at http://opensource.org/licenses/MIT) */ #include <cstddef> // size_t /** * @brief Random utility functions. # ... rest of the code ...
9f598b0163a7ef6392b1ea67bde43f84fd9efbb8
myflaskapp/tests/functional_tests.py
myflaskapp/tests/functional_tests.py
from selenium import webdriver browser = webdriver.Chrome() browser.get('http://localhost:5000') assert 'tdd_with_python' in browser.title
from selenium import webdriver browser = webdriver.Chrome() # Edith has heard about a cool new online to-do app. She goes # to check out #its homepage browser.get('http://localhost:5000') # She notices the page title and header mention to-do lists assert 'To-Do' in browser.title # She is invited to enter a to-do item straight away # She types "Buy peacock feathers" into a text box (Edith's hobby # is tying fly-fishing lures) # When she hits enter, the page updates, and now the page lists # "1: Buy peacock feathers" as an item in a to-do list # There is still a text box inviting her to add another item. She # enters "Use peacock feathers to make a fly" (Edith is very methodical) # The page updates again, and now shows both items on her list # Edith wonders whether the site will remember her list. Then she sees # that the site has generated a unique URL for her -- there is some # explanatory text to that effect. # She visits that URL - her to-do list is still there. # Satisfied, she goes back to sleep browser.quit()
Change title test, add comments of To-do user story
Change title test, add comments of To-do user story
Python
mit
terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python
python
## Code Before: from selenium import webdriver browser = webdriver.Chrome() browser.get('http://localhost:5000') assert 'tdd_with_python' in browser.title ## Instruction: Change title test, add comments of To-do user story ## Code After: from selenium import webdriver browser = webdriver.Chrome() # Edith has heard about a cool new online to-do app. She goes # to check out #its homepage browser.get('http://localhost:5000') # She notices the page title and header mention to-do lists assert 'To-Do' in browser.title # She is invited to enter a to-do item straight away # She types "Buy peacock feathers" into a text box (Edith's hobby # is tying fly-fishing lures) # When she hits enter, the page updates, and now the page lists # "1: Buy peacock feathers" as an item in a to-do list # There is still a text box inviting her to add another item. She # enters "Use peacock feathers to make a fly" (Edith is very methodical) # The page updates again, and now shows both items on her list # Edith wonders whether the site will remember her list. Then she sees # that the site has generated a unique URL for her -- there is some # explanatory text to that effect. # She visits that URL - her to-do list is still there. # Satisfied, she goes back to sleep browser.quit()
... from selenium import webdriver browser = webdriver.Chrome() # Edith has heard about a cool new online to-do app. She goes # to check out #its homepage browser.get('http://localhost:5000') # She notices the page title and header mention to-do lists assert 'To-Do' in browser.title # She is invited to enter a to-do item straight away # She types "Buy peacock feathers" into a text box (Edith's hobby # is tying fly-fishing lures) # When she hits enter, the page updates, and now the page lists # "1: Buy peacock feathers" as an item in a to-do list # There is still a text box inviting her to add another item. She # enters "Use peacock feathers to make a fly" (Edith is very methodical) # The page updates again, and now shows both items on her list # Edith wonders whether the site will remember her list. Then she sees # that the site has generated a unique URL for her -- there is some # explanatory text to that effect. # She visits that URL - her to-do list is still there. # Satisfied, she goes back to sleep browser.quit() ...
75d7441f90e077eeeb955e4eb0c514a1736a88fb
tohu/v3/utils.py
tohu/v3/utils.py
__all__ = ['identity', 'print_generated_sequence']


def identity(x):
    """
    Helper function which returns its argument unchanged.
    That is, `identity(x)` returns `x` for any input `x`.
    """
    return x


def print_generated_sequence(gen, num, *, sep=", ", seed=None):
    """
    Helper function which prints a sequence of `num` items
    produced by the random generator `gen`.
    """
    if seed:
        gen.reset(seed)

    elems = [str(next(gen)) for _ in range(num)]
    sep_initial = "\n\n" if '\n' in sep else " "
    print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
from collections import namedtuple

__all__ = ['identity', 'print_generated_sequence']


def identity(x):
    """
    Helper function which returns its argument unchanged.
    That is, `identity(x)` returns `x` for any input `x`.
    """
    return x


def print_generated_sequence(gen, num, *, sep=", ", seed=None):
    """
    Helper function which prints a sequence of `num` items
    produced by the random generator `gen`.
    """
    if seed:
        gen.reset(seed)

    elems = [str(next(gen)) for _ in range(num)]
    sep_initial = "\n\n" if '\n' in sep else " "
    print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))


def make_dummy_tuples(chars='abcde'):
    Quux = namedtuple('Quux', ['x', 'y'])
    some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
    return some_tuples
Add helper function to produce some dummy tuples (for testing and debugging)
Add helper function to produce some dummy tuples (for testing and debugging)
Python
mit
maxalbert/tohu
python
## Code Before: __all__ = ['identity', 'print_generated_sequence'] def identity(x): """ Helper function which returns its argument unchanged. That is, `identity(x)` returns `x` for any input `x`. """ return x def print_generated_sequence(gen, num, *, sep=", ", seed=None): """ Helper function which prints a sequence of `num` items produced by the random generator `gen`. """ if seed: gen.reset(seed) elems = [str(next(gen)) for _ in range(num)] sep_initial = "\n\n" if '\n' in sep else " " print("Generated sequence:{}{}".format(sep_initial, sep.join(elems))) ## Instruction: Add helper function to produce some dummy tuples (for testing and debugging) ## Code After: from collections import namedtuple __all__ = ['identity', 'print_generated_sequence'] def identity(x): """ Helper function which returns its argument unchanged. That is, `identity(x)` returns `x` for any input `x`. """ return x def print_generated_sequence(gen, num, *, sep=", ", seed=None): """ Helper function which prints a sequence of `num` items produced by the random generator `gen`. """ if seed: gen.reset(seed) elems = [str(next(gen)) for _ in range(num)] sep_initial = "\n\n" if '\n' in sep else " " print("Generated sequence:{}{}".format(sep_initial, sep.join(elems))) def make_dummy_tuples(chars='abcde'): Quux = namedtuple('Quux', ['x', 'y']) some_tuples = [Quux((c*2).upper(), c*2) for c in chars] return some_tuples
// ... existing code ... from collections import namedtuple __all__ = ['identity', 'print_generated_sequence'] // ... modified code ... elems = [str(next(gen)) for _ in range(num)] sep_initial = "\n\n" if '\n' in sep else " " print("Generated sequence:{}{}".format(sep_initial, sep.join(elems))) def make_dummy_tuples(chars='abcde'): Quux = namedtuple('Quux', ['x', 'y']) some_tuples = [Quux((c*2).upper(), c*2) for c in chars] return some_tuples // ... rest of the code ...
f5543f10208ed4eef9d0f1a0a208e03e72709f40
windpowerlib/wind_farm.py
windpowerlib/wind_farm.py
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"

import numpy as np


class WindFarm(object):
    """
    """
    def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates, power_curve=None, power_output=None):
        self.wind_farm_name = wind_farm_name
        self.wind_turbine_fleet = wind_turbine_fleet
        self.coordinates = coordinates
        self.power_curve = power_curve
        self.power_output = power_output

#     def wind_park_p_curve(self):
#         p_curve = np.sum([self.wind_turbines[i].power_curve
#                           for i in range(len(self.wind_turbines))], axis=0)
#         return p_curve
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"

import numpy as np


class WindFarm(object):
    """
    def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates):
        self.wind_farm_name = wind_farm_name
        self.wind_turbine_fleet = wind_turbine_fleet
        self.coordinates = coordinates
        self.power_curve = None
        self.power_output = None

#     def wind_park_p_curve(self):
#         p_curve = np.sum([self.wind_turbines[i].power_curve
#                           for i in range(len(self.wind_turbines))], axis=0)
#         return p_curve
Change parameters power_curve and power_output to attributes
Change parameters power_curve and power_output to attributes
Python
mit
wind-python/windpowerlib
python
## Code Before: __copyright__ = "Copyright oemof developer group" __license__ = "GPLv3" import numpy as np class WindFarm(object): """ """ def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates, power_curve=None, power_output=None): self.wind_farm_name = wind_farm_name self.wind_turbine_fleet = wind_turbine_fleet self.coordinates = coordinates self.power_curve = power_curve self.power_output = power_output # def wind_park_p_curve(self): # p_curve = np.sum([self.wind_turbines[i].power_curve # for i in range(len(self.wind_turbines))], axis=0) # return p_curve ## Instruction: Change parameters power_curve and power_output to attributes ## Code After: __copyright__ = "Copyright oemof developer group" __license__ = "GPLv3" import numpy as np class WindFarm(object): """ def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates): self.wind_farm_name = wind_farm_name self.wind_turbine_fleet = wind_turbine_fleet self.coordinates = coordinates self.power_curve = None self.power_output = None # def wind_park_p_curve(self): # p_curve = np.sum([self.wind_turbines[i].power_curve # for i in range(len(self.wind_turbines))], axis=0) # return p_curve
# ... existing code ... class WindFarm(object): """ def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates): self.wind_farm_name = wind_farm_name self.wind_turbine_fleet = wind_turbine_fleet self.coordinates = coordinates self.power_curve = None self.power_output = None # def wind_park_p_curve(self): # p_curve = np.sum([self.wind_turbines[i].power_curve # ... rest of the code ...
3f81676d8bc39b459d98a1a91b9ced97be58451d
celestial/exoplanets_importer.py
celestial/exoplanets_importer.py
import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError

class ExoplanetsImporter:
    @staticmethod
    def run(filename = None):
        if filename!=None:
            csv_data = open(filename)
        else:
            csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
        rows = csv.reader(csv_data)
        headers = {}
        got_headers = False
        for row in rows:
            if got_headers == 0:
                # Store headers
                colnum = 0
                for col in row:
                    headers[col] = colnum
                    colnum += 1
                got_headers = True
            else:
                # Find and store system data
                stardata = {
                    'name': row[headers['STAR']],
                    'temperature': row[headers['TEFF']] or None
                }
                try:
                    system, created = SolarSystem.objects.get_or_create(**stardata)
                except ValidationError:
                    print stardata
                    raise
                # Find and store planet data
                planetdata = {
                    'name': row[headers['NAME']],
                    'radius': row[headers['R']] or None,
                    #'temperature': row[headers['NAME']],
                    'semi_major_axis': row[headers['A']],
                    'solar_system': system
                }
                try:
                    planet, created = Planet.objects.get_or_create(**planetdata)
                except ValidationError:
                    print planetdata
                    raise
import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError

class ExoplanetsImporter:
    @staticmethod
    def run(filename = None):
        if filename!=None:
            csv_data = open(filename)
        else:
            csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
        rows = csv.reader(csv_data)
        headers = {}
        got_headers = False
        for row in rows:
            if got_headers == 0:
                # Store headers
                colnum = 0
                for col in row:
                    headers[col] = colnum
                    colnum += 1
                got_headers = True
            else:
                # Find and store system data
                try:
                    system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
                    system.temperature = row[headers['TEFF']] or None
                    system.save()
                except ValidationError:
                    print stardata
                    raise
                # Find and store planet data
                try:
                    planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
                    planet.radius = row[headers['R']] or None
                    planet.semi_major_axis = row[headers['A']]
                    planet.save()
                except ValidationError:
                    print planetdata
                    raise
Refactor importer slightly to avoid creation problems
Refactor importer slightly to avoid creation problems
Python
mit
Floppy/kepler-explorer,Floppy/kepler-explorer,Floppy/kepler-explorer
python
## Code Before: import requests import csv from models import Planet, SolarSystem from django.core.exceptions import ValidationError class ExoplanetsImporter: @staticmethod def run(filename = None): if filename!=None: csv_data = open(filename) else: csv_data = requests.get('http://exoplanets.org/exoplanets.csv') rows = csv.reader(csv_data) headers = {} got_headers = False for row in rows: if got_headers == 0: # Store headers colnum = 0 for col in row: headers[col] = colnum colnum += 1 got_headers = True else: # Find and store system data stardata = { 'name': row[headers['STAR']], 'temperature': row[headers['TEFF']] or None } try: system, created = SolarSystem.objects.get_or_create(**stardata) except ValidationError: print stardata raise # Find and store planet data planetdata = { 'name': row[headers['NAME']], 'radius': row[headers['R']] or None, #'temperature': row[headers['NAME']], 'semi_major_axis': row[headers['A']], 'solar_system': system } try: planet, created = Planet.objects.get_or_create(**planetdata) except ValidationError: print planetdata raise ## Instruction: Refactor importer slightly to avoid creation problems ## Code After: import requests import csv from models import Planet, SolarSystem from django.core.exceptions import ValidationError class ExoplanetsImporter: @staticmethod def run(filename = None): if filename!=None: csv_data = open(filename) else: csv_data = requests.get('http://exoplanets.org/exoplanets.csv') rows = csv.reader(csv_data) headers = {} got_headers = False for row in rows: if got_headers == 0: # Store headers colnum = 0 for col in row: headers[col] = colnum colnum += 1 got_headers = True else: # Find and store system data try: system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']]) system.temperature = row[headers['TEFF']] or None system.save() except ValidationError: print stardata raise # Find and store planet data try: planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system) planet.radius = row[headers['R']] or None planet.semi_major_axis = row[headers['A']] planet.save() except ValidationError: print planetdata raise
... got_headers = True else: # Find and store system data try: system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']]) system.temperature = row[headers['TEFF']] or None system.save() except ValidationError: print stardata raise # Find and store planet data try: planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system) planet.radius = row[headers['R']] or None planet.semi_major_axis = row[headers['A']] planet.save() except ValidationError: print planetdata raise ...
31ea614e783273ef14919d1628a7ada11e8850fd
apps/users/adapters.py
apps/users/adapters.py
import re

from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings

from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX


class UserAccountEmail(Email):
    def get_receivers(self):
        return [self.object]

    @property
    def template_name(self):
        return self.kwargs['template_name']

    def get_context(self):
        context = super().get_context()
        context['contact_email'] = settings.CONTACT_EMAIL
        return context


class AccountAdapter(DefaultAccountAdapter):
    username_regex = re.compile(USERNAME_REGEX)
    error_messages = dict(
        DefaultAccountAdapter.error_messages,
        invalid_username=USERNAME_INVALID_MESSAGE
    )

    def send_mail(self, template_prefix, email, context):
        user = context['user']
        return UserAccountEmail.send_sync(
            user,
            template_name=template_prefix,
            **context
        )
import re

from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings

from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX


class UserAccountEmail(Email, SyncEmailMixin):
    def get_receivers(self):
        return [self.object]

    @property
    def template_name(self):
        return self.kwargs['template_name']

    def get_context(self):
        context = super().get_context()
        context['contact_email'] = settings.CONTACT_EMAIL
        return context


class AccountAdapter(DefaultAccountAdapter):
    username_regex = re.compile(USERNAME_REGEX)
    error_messages = dict(
        DefaultAccountAdapter.error_messages,
        invalid_username=USERNAME_INVALID_MESSAGE
    )

    def send_mail(self, template_prefix, email, context):
        user = context['user']
        return UserAccountEmail.send(
            user,
            template_name=template_prefix,
            **context
        )
Use SyncEmailMixin for account mails
Use SyncEmailMixin for account mails
Python
agpl-3.0
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
python
## Code Before: import re from allauth.account.adapter import DefaultAccountAdapter from django.conf import settings from apps.contrib.emails import Email from apps.users import USERNAME_INVALID_MESSAGE from apps.users import USERNAME_REGEX class UserAccountEmail(Email): def get_receivers(self): return [self.object] @property def template_name(self): return self.kwargs['template_name'] def get_context(self): context = super().get_context() context['contact_email'] = settings.CONTACT_EMAIL return context class AccountAdapter(DefaultAccountAdapter): username_regex = re.compile(USERNAME_REGEX) error_messages = dict( DefaultAccountAdapter.error_messages, invalid_username=USERNAME_INVALID_MESSAGE ) def send_mail(self, template_prefix, email, context): user = context['user'] return UserAccountEmail.send_sync( user, template_name=template_prefix, **context ) ## Instruction: Use SyncEmailMixin for account mails ## Code After: import re from allauth.account.adapter import DefaultAccountAdapter from django.conf import settings from adhocracy4.emails.mixins import SyncEmailMixin from apps.contrib.emails import Email from apps.users import USERNAME_INVALID_MESSAGE from apps.users import USERNAME_REGEX class UserAccountEmail(Email, SyncEmailMixin): def get_receivers(self): return [self.object] @property def template_name(self): return self.kwargs['template_name'] def get_context(self): context = super().get_context() context['contact_email'] = settings.CONTACT_EMAIL return context class AccountAdapter(DefaultAccountAdapter): username_regex = re.compile(USERNAME_REGEX) error_messages = dict( DefaultAccountAdapter.error_messages, invalid_username=USERNAME_INVALID_MESSAGE ) def send_mail(self, template_prefix, email, context): user = context['user'] return UserAccountEmail.send( user, template_name=template_prefix, **context )
// ... existing code ... from allauth.account.adapter import DefaultAccountAdapter from django.conf import settings from adhocracy4.emails.mixins import SyncEmailMixin from apps.contrib.emails import Email from apps.users import USERNAME_INVALID_MESSAGE from apps.users import USERNAME_REGEX class UserAccountEmail(Email, SyncEmailMixin): def get_receivers(self): return [self.object] // ... modified code ... def send_mail(self, template_prefix, email, context): user = context['user'] return UserAccountEmail.send( user, template_name=template_prefix, **context // ... rest of the code ...
175d100a6fb57d9818ba8580501c4cee8a91c574
src/main/java/editor/util/UndoableAction.java
src/main/java/editor/util/UndoableAction.java
package editor.util;

import java.util.function.Supplier;

/**
 * This class represents an action that can be undone, which is done by simply
 * using two actions that are semantically supposed to be opposites. Each
 * action can optionally return a value.
 *
 * @param <R> information to be returned by the action
 * @param <U> information to be returned by the undone action
 * @author Alec Roelke
 */
public interface UndoableAction<R, U>
{
    /**
     * Create a new undoable action from the given functions.
     *
     * @param <R> return type of the forward action
     * @param <U> return type of the reverse action
     * @param forward forward action to perform
     * @param reverse reverse action to perform
     * @return The new UndoableAction.
     */
    public static <R, U> UndoableAction<R, U> createAction(Supplier<R> forward, Supplier<U> reverse)
    {
        return new UndoableAction<>() {
            public R redo()
            {
                return forward.get();
            }

            public U undo()
            {
                return reverse.get();
            }
        };
    }

    /**
     * Perform the action (or redo it if it has been undone).
     *
     * @return a value containing information about the result of performing
     * the action.
     */
    R redo();

    /**
     * Undo the action.
     *
     * @return a value containing information about the result undoing the action.
     */
    U undo();
}
package editor.util;

import java.util.function.Supplier;

/**
 * This class represents an action that can be undone, which is done by simply
 * using two actions that are semantically supposed to be opposites. Each
 * action can optionally return a value.
 *
 * @param <R> information to be returned by the action
 * @param <U> information to be returned by the undone action
 * @author Alec Roelke
 */
public interface UndoableAction<R, U>
{
    /**
     * Create a new undoable action from the given functions.
     *
     * @param <R> return type of the forward action
     * @param <U> return type of the reverse action
     * @param forward forward action to perform
     * @param reverse reverse action to perform
     * @return The new UndoableAction.
     */
    public static <R, U> UndoableAction<R, U> createAction(Supplier<R> forward, Supplier<U> reverse)
    {
        return new UndoableAction<>() {
            @Override
            public R redo()
            {
                return forward.get();
            }

            @Override
            public U undo()
            {
                return reverse.get();
            }
        };
    }

    /**
     * Perform the action (or redo it if it has been undone).
     *
     * @return a value containing information about the result of performing
     * the action.
     */
    R redo();

    /**
     * Undo the action.
     *
     * @return a value containing information about the result undoing the action.
     */
    U undo();
}
Add @Override where it's needed
Add @Override where it's needed
Java
mit
aroelke/deck-editor-java
java
## Code Before: package editor.util; import java.util.function.Supplier; /** * This class represents an action that can be undone, which is done by simply * using two actions that are semantically supposed to be opposites. Each * action can optionally return a value. * * @param <R> information to be returned by the action * @param <U> information to be returned by the undone action * @author Alec Roelke */ public interface UndoableAction<R, U> { /** * Create a new undoable action from the given functions. * * @param <R> return type of the forward action * @param <U> return type of the reverse action * @param forward forward action to perform * @param reverse reverse action to perform * @return The new UndoableAction. */ public static <R, U> UndoableAction<R, U> createAction(Supplier<R> forward, Supplier<U> reverse) { return new UndoableAction<>() { public R redo() { return forward.get(); } public U undo() { return reverse.get(); } }; } /** * Perform the action (or redo it if it has been undone). * * @return a value containing information about the result of performing * the action. */ R redo(); /** * Undo the action. * * @return a value containing information about the result undoing the action. */ U undo(); } ## Instruction: Add @Override where it's needed ## Code After: package editor.util; import java.util.function.Supplier; /** * This class represents an action that can be undone, which is done by simply * using two actions that are semantically supposed to be opposites. Each * action can optionally return a value. * * @param <R> information to be returned by the action * @param <U> information to be returned by the undone action * @author Alec Roelke */ public interface UndoableAction<R, U> { /** * Create a new undoable action from the given functions. * * @param <R> return type of the forward action * @param <U> return type of the reverse action * @param forward forward action to perform * @param reverse reverse action to perform * @return The new UndoableAction. */ public static <R, U> UndoableAction<R, U> createAction(Supplier<R> forward, Supplier<U> reverse) { return new UndoableAction<>() { @Override public R redo() { return forward.get(); } @Override public U undo() { return reverse.get(); } }; } /** * Perform the action (or redo it if it has been undone). * * @return a value containing information about the result of performing * the action. */ R redo(); /** * Undo the action. * * @return a value containing information about the result undoing the action. */ U undo(); }
// ... existing code ... { return new UndoableAction<>() { @Override public R redo() { return forward.get(); } @Override public U undo() { return reverse.get(); } }; } // ... rest of the code ...
bce093df2bbcf12d8eec8f812408a0ea88521d10
squid_url_cleaner.py
squid_url_cleaner.py
import sys
from url_cleaner import removeBlackListedParameters

while True:
    line = sys.stdin.readline().strip()
    urlList = line.split(' ')
    urlInput = urlList[0]
    newUrl = removeBlackListedParameters(urlInput)
    sys.stdout.write('%s%s' % (newUrl, '\n'))
    sys.stdout.flush()
import sys
import signal
from url_cleaner import removeBlackListedParameters

def sig_handle(signal, frame):
    sys.exit(0)

while True:
    signal.signal(signal.SIGINT, sig_handle)
    signal.signal(signal.SIGTERM, sig_handle)
    try:
        line = sys.stdin.readline().strip()
        urlList = line.split()
        urlInput = urlList[0]
        newUrl = removeBlackListedParameters(urlInput)
        sys.stdout.write('%s%s'.format(newUrl, '\n'))
        sys.stdout.flush()
    except Exception:
        continue
Handle signals for daemon processes, removed deprecated python var sub
Handle signals for daemon processes, removed deprecated python var sub
Python
mit
Ladoo/url_cleaner
python
## Code Before: import sys from url_cleaner import removeBlackListedParameters while True: line = sys.stdin.readline().strip() urlList = line.split(' ') urlInput = urlList[0] newUrl = removeBlackListedParameters(urlInput) sys.stdout.write('%s%s' % (newUrl, '\n')) sys.stdout.flush() ## Instruction: Handle signals for daemon processes, removed deprecated python var sub ## Code After: import sys import signal from url_cleaner import removeBlackListedParameters def sig_handle(signal, frame): sys.exit(0) while True: signal.signal(signal.SIGINT, sig_handle) signal.signal(signal.SIGTERM, sig_handle) try: line = sys.stdin.readline().strip() urlList = line.split() urlInput = urlList[0] newUrl = removeBlackListedParameters(urlInput) sys.stdout.write('%s%s'.format(newUrl, '\n')) sys.stdout.flush() except Exception: continue
// ... existing code ... import sys import signal from url_cleaner import removeBlackListedParameters def sig_handle(signal, frame): sys.exit(0) while True: signal.signal(signal.SIGINT, sig_handle) signal.signal(signal.SIGTERM, sig_handle) try: line = sys.stdin.readline().strip() urlList = line.split() urlInput = urlList[0] newUrl = removeBlackListedParameters(urlInput) sys.stdout.write('%s%s'.format(newUrl, '\n')) sys.stdout.flush() except Exception: continue // ... rest of the code ...
316066b2415861b65d540b822df1b2afea906207
regulations/management/commands/setup_cors.py
regulations/management/commands/setup_cors.py
import boto3
from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Set CORS rules on the Notice and Comment attachment bucket'

    def handle(self, *args, **options):
        session = boto3.Session(
            aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID,
            aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY,
        )
        s3 = session.client('s3')
        s3.put_bucket_cors(
            Bucket=settings.ATTACHMENT_BUCKET,
            CORSConfiguration={
                'CORSRules': [
                    {
                        'AllowedMethods': ['GET', 'PUT'],
                        'AllowedOrigins': settings.ALLOWED_HOSTS or ['*'],
                        'AllowedHeaders': ['*'],
                    },
                ],
            },
        )
import boto3
from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Set CORS rules on the Notice and Comment attachment bucket'

    def handle(self, *args, **options):
        hosts = settings.ALLOWED_HOSTS
        origins = ['http://' + host for host in hosts]
        origins = origins + ['https://' + host for host in hosts]
        session = boto3.Session(
            aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID,
            aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY,
        )
        s3 = session.client('s3')
        s3.put_bucket_cors(
            Bucket=settings.ATTACHMENT_BUCKET,
            CORSConfiguration={
                'CORSRules': [
                    {
                        'AllowedMethods': ['GET', 'PUT'],
                        'AllowedOrigins': origins or ['*'],
                        'AllowedHeaders': ['*'],
                    },
                ],
            },
        )
Add protocol to hosts for CORS
Add protocol to hosts for CORS
Python
cc0-1.0
18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,eregs/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site
python
## Code Before: import boto3 from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Set CORS rules on the Notice and Comment attachment bucket' def handle(self, *args, **options): session = boto3.Session( aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID, aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY, ) s3 = session.client('s3') s3.put_bucket_cors( Bucket=settings.ATTACHMENT_BUCKET, CORSConfiguration={ 'CORSRules': [ { 'AllowedMethods': ['GET', 'PUT'], 'AllowedOrigins': settings.ALLOWED_HOSTS or ['*'], 'AllowedHeaders': ['*'], }, ], }, ) ## Instruction: Add protocol to hosts for CORS ## Code After: import boto3 from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Set CORS rules on the Notice and Comment attachment bucket' def handle(self, *args, **options): hosts = settings.ALLOWED_HOSTS origins = ['http://' + host for host in hosts] origins = origins + ['https://' + host for host in hosts] session = boto3.Session( aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID, aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY, ) s3 = session.client('s3') s3.put_bucket_cors( Bucket=settings.ATTACHMENT_BUCKET, CORSConfiguration={ 'CORSRules': [ { 'AllowedMethods': ['GET', 'PUT'], 'AllowedOrigins': origins or ['*'], 'AllowedHeaders': ['*'], }, ], }, )
# ... existing code ... help = 'Set CORS rules on the Notice and Comment attachment bucket' def handle(self, *args, **options): hosts = settings.ALLOWED_HOSTS origins = ['http://' + host for host in hosts] origins = origins + ['https://' + host for host in hosts] session = boto3.Session( aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID, aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY, ) s3 = session.client('s3') s3.put_bucket_cors( Bucket=settings.ATTACHMENT_BUCKET, CORSConfiguration={ # ... modified code ... 'CORSRules': [ { 'AllowedMethods': ['GET', 'PUT'], 'AllowedOrigins': origins or ['*'], 'AllowedHeaders': ['*'], }, ], # ... rest of the code ...
7cbc6ae58357ef647a007e1b505884e523d924c2
numba/tests/test_ctypes_call.py
numba/tests/test_ctypes_call.py
import os
import ctypes

from numba import *

@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
    return func(value)

def test_ctypes_calls():
    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    puts = libc.puts
    puts.argtypes = [ctypes.c_char_p]
    assert call_ctypes_func(puts, "Hello World!")

    libm = ctypes.CDLL(ctypes.util.find_library('m'))
    ceil = libm.ceil
    ceil.argtypes = [ctypes.c_double]
    ceil.restype = ctypes.c_double
    assert call_ctypes_func(ceil, 10.1) == 11.0

def test_str_return():
    try:
        import errno
    except ImportError:
        return

    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    strerror = libc.strerror
    strerror.argtypes = [ctypes.c_int]
    strerror.restype = ctypes.c_char_p

    expected = os.strerror(errno.EACCES)
    got = call_ctypes_func(strerror, errno.EACCES)
    assert expected == got

if __name__ == "__main__":
    test_ctypes_calls()
#    test_str_return()
import os
import ctypes

from numba import *

@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
    return func(value)

def test_ctypes_calls():
    # Test puts for no segfault
    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    puts = libc.puts
    puts.argtypes = [ctypes.c_char_p]
    call_ctypes_func(puts, "Hello World!")

    # Test ceil result
    libm = ctypes.CDLL(ctypes.util.find_library('m'))
    ceil = libm.ceil
    ceil.argtypes = [ctypes.c_double]
    ceil.restype = ctypes.c_double
    assert call_ctypes_func(ceil, 10.1) == 11.0

def test_str_return():
    try:
        import errno
    except ImportError:
        return

    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    strerror = libc.strerror
    strerror.argtypes = [ctypes.c_int]
    strerror.restype = ctypes.c_char_p

    expected = os.strerror(errno.EACCES)
    got = call_ctypes_func(strerror, errno.EACCES)
    assert expected == got

if __name__ == "__main__":
    test_ctypes_calls()
#    test_str_return()
Fix ctypes call test for windows
Fix ctypes call test for windows
Python
bsd-2-clause
sklam/numba,pitrou/numba,GaZ3ll3/numba,numba/numba,jriehl/numba,sklam/numba,IntelLabs/numba,IntelLabs/numba,jriehl/numba,pitrou/numba,shiquanwang/numba,gdementen/numba,stonebig/numba,pombredanne/numba,gmarkall/numba,cpcloud/numba,stonebig/numba,jriehl/numba,ssarangi/numba,gdementen/numba,stuartarchibald/numba,ssarangi/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,ssarangi/numba,gmarkall/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,sklam/numba,gdementen/numba,stuartarchibald/numba,jriehl/numba,stefanseefeld/numba,jriehl/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,cpcloud/numba,stonebig/numba,seibert/numba,pitrou/numba,numba/numba,ssarangi/numba,stefanseefeld/numba,stonebig/numba,seibert/numba,pitrou/numba,pombredanne/numba,pombredanne/numba,IntelLabs/numba,stefanseefeld/numba,pitrou/numba,numba/numba,GaZ3ll3/numba,GaZ3ll3/numba,shiquanwang/numba,IntelLabs/numba,stuartarchibald/numba,pombredanne/numba,gdementen/numba,seibert/numba,shiquanwang/numba,sklam/numba,stefanseefeld/numba,sklam/numba,numba/numba,numba/numba,gdementen/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stefanseefeld/numba,seibert/numba,ssarangi/numba,pombredanne/numba
python
## Code Before: import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return() ## Instruction: Fix ctypes call test for windows ## Code After: import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
... def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ...
8ec9b3d2df3b68c96eeb18917bc62758aba6add2
src/info/tregmine/commands/BlockHereCommand.java
src/info/tregmine/commands/BlockHereCommand.java
package info.tregmine.commands;

import org.bukkit.block.Block;
import org.bukkit.Location;
import org.bukkit.Material;

import info.tregmine.Tregmine;
import info.tregmine.api.TregminePlayer;

public class BlockHereCommand extends AbstractCommand
{
    public BlockHereCommand(Tregmine tregmine)
    {
        super(tregmine, "blockhere");
    }

    @Override
    public boolean handlePlayer(TregminePlayer player, String[] args)
    {
        if (!player.getRank().canFill()) {
            return true;
        }

        Location loc = player.getLocation();
        Block block = player.getWorld().getBlockAt(loc.getBlockX(), loc.getBlockY() - 1, loc.getBlockZ());
        block.setType(Material.DIRT);

        return true;
    }
}
package info.tregmine.commands;

import org.bukkit.block.Block;
import org.bukkit.Material;

import info.tregmine.Tregmine;
import info.tregmine.api.TregminePlayer;

public class BlockHereCommand extends AbstractCommand
{
    public BlockHereCommand(Tregmine tregmine)
    {
        super(tregmine, "blockhere");
    }

    @Override
    public boolean handlePlayer(TregminePlayer player, String[] args)
    {
        if (!player.getRank().canFill()) {
            return true;
        }

        Block block = player.getWorld().getBlockAt(player.getLocation());
        block.setType(Material.DIRT);

        return true;
    }
}
Revert "Modified /blockghere so it places block beneath player"
Revert "Modified /blockghere so it places block beneath player" This reverts commit cecd55798926768236464c8782071cbd253f848a.
Java
bsd-3-clause
Clunker5/tregmine-2.0,Clunker5/tregmine-2.0,EmilHernvall/tregmine,EmilHernvall/tregmine,EmilHernvall/tregmine
java
## Code Before: package info.tregmine.commands; import org.bukkit.block.Block; import org.bukkit.Location; import org.bukkit.Material; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; public class BlockHereCommand extends AbstractCommand { public BlockHereCommand(Tregmine tregmine) { super(tregmine, "blockhere"); } @Override public boolean handlePlayer(TregminePlayer player, String[] args) { if (!player.getRank().canFill()) { return true; } Location loc = player.getLocation(); Block block = player.getWorld().getBlockAt(loc.getBlockX(), loc.getBlockY() - 1, loc.getBlockZ()); block.setType(Material.DIRT); return true; } } ## Instruction: Revert "Modified /blockghere so it places block beneath player" This reverts commit cecd55798926768236464c8782071cbd253f848a. ## Code After: package info.tregmine.commands; import org.bukkit.block.Block; import org.bukkit.Material; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; public class BlockHereCommand extends AbstractCommand { public BlockHereCommand(Tregmine tregmine) { super(tregmine, "blockhere"); } @Override public boolean handlePlayer(TregminePlayer player, String[] args) { if (!player.getRank().canFill()) { return true; } Block block = player.getWorld().getBlockAt(player.getLocation()); block.setType(Material.DIRT); return true; } }
... package info.tregmine.commands; import org.bukkit.block.Block; import org.bukkit.Material; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; ... return true; } Block block = player.getWorld().getBlockAt(player.getLocation()); block.setType(Material.DIRT); return true; ...
d7e03596f8bf1e886e984c0ea98334af878a15e2
meta/bytecodetools/print_code.py
meta/bytecodetools/print_code.py
'''
Created on May 10, 2012

@author: sean
'''

from .bytecode_consumer import ByteCodeConsumer
from argparse import ArgumentParser

class ByteCodePrinter(ByteCodeConsumer):
    def generic_consume(self, instr):
        print instr

def main():
    parser = ArgumentParser()
    parser.add_argument()

if __name__ == '__main__':
    main()
'''
Created on May 10, 2012

@author: sean
'''

from __future__ import print_function

from .bytecode_consumer import ByteCodeConsumer
from argparse import ArgumentParser

class ByteCodePrinter(ByteCodeConsumer):
    def generic_consume(self, instr):
        print(instr)

def main():
    parser = ArgumentParser()
    parser.add_argument()

if __name__ == '__main__':
    main()
Use __future__.print_function so syntax is valid on Python 3
Use __future__.print_function so syntax is valid on Python 3
Python
bsd-3-clause
enthought/Meta,gutomaia/Meta
python
## Code Before: ''' Created on May 10, 2012 @author: sean ''' from .bytecode_consumer import ByteCodeConsumer from argparse import ArgumentParser class ByteCodePrinter(ByteCodeConsumer): def generic_consume(self, instr): print instr def main(): parser = ArgumentParser() parser.add_argument() if __name__ == '__main__': main() ## Instruction: Use __future__.print_function so syntax is valid on Python 3 ## Code After: ''' Created on May 10, 2012 @author: sean ''' from __future__ import print_function from .bytecode_consumer import ByteCodeConsumer from argparse import ArgumentParser class ByteCodePrinter(ByteCodeConsumer): def generic_consume(self, instr): print(instr) def main(): parser = ArgumentParser() parser.add_argument() if __name__ == '__main__': main()
// ... existing code ... @author: sean ''' from __future__ import print_function from .bytecode_consumer import ByteCodeConsumer from argparse import ArgumentParser // ... modified code ... class ByteCodePrinter(ByteCodeConsumer): def generic_consume(self, instr): print(instr) def main(): parser = ArgumentParser() // ... rest of the code ...
2bad176fa23612fc4e2a7a6a5e7ac7f151c11315
gstrap/src/main/java/com/voodoodyne/gstrap/rest/ErrorBody.java
gstrap/src/main/java/com/voodoodyne/gstrap/rest/ErrorBody.java
package com.voodoodyne.gstrap.rest;

import com.fasterxml.jackson.annotation.JsonUnwrapped;
import com.voodoodyne.gstrap.lang.Types;
import lombok.RequiredArgsConstructor;

import java.util.List;

/**
 * JSON structure of an error body from the ThrowableMapper
 */
@RequiredArgsConstructor
public class ErrorBody {

    private final Throwable t;

    public String getMessage() {
        return t.getMessage();
    }

    public String getType() {
        return t.getClass().getSimpleName();
    }

    public List<String> getTypes() {
        return Types.getTypes(t, ClientException.class, RuntimeException.class, Exception.class, Throwable.class);
    }

    public ErrorBody getCause() {
        return t.getCause() == null ? null : new ErrorBody(t.getCause());
    }

    @JsonUnwrapped
    public Object getAdditionalProperties() {
        if (t instanceof AdditionalProperties) {
            return ((AdditionalProperties)t).getAdditionalProperties();
        } else {
            return null;
        }
    }
}
package com.voodoodyne.gstrap.rest;

import com.fasterxml.jackson.annotation.JsonUnwrapped;
import com.voodoodyne.gstrap.lang.Types;
import lombok.RequiredArgsConstructor;

import java.util.List;
import java.util.stream.Collectors;

/**
 * JSON structure of an error body from the ThrowableMapper
 */
@RequiredArgsConstructor
public class ErrorBody {

    private static final String stripExceptionSuffix(final String className) {
        if (className.endsWith("Exception")) {
            return className.substring(0, className.length() - "Exception".length());
        } else {
            return className;
        }
    }

    private final Throwable t;

    public String getMessage() {
        return t.getMessage();
    }

    public String getType() {
        return stripExceptionSuffix(t.getClass().getSimpleName());
    }

    public List<String> getTypes() {
        return Types.getTypes(t, ClientException.class, RuntimeException.class, Exception.class, Throwable.class)
                .stream()
                .map(ErrorBody::stripExceptionSuffix).collect(Collectors.toList());
    }

    public ErrorBody getCause() {
        return t.getCause() == null ? null : new ErrorBody(t.getCause());
    }

    @JsonUnwrapped
    public Object getAdditionalProperties() {
        if (t instanceof AdditionalProperties) {
            return ((AdditionalProperties)t).getAdditionalProperties();
        } else {
            return null;
        }
    }
}
Remove "Exception" from the error text
Remove "Exception" from the error text
Java
mit
stickfigure/gstrap
java
## Code Before: package com.voodoodyne.gstrap.rest; import com.fasterxml.jackson.annotation.JsonUnwrapped; import com.voodoodyne.gstrap.lang.Types; import lombok.RequiredArgsConstructor; import java.util.List; /** * JSON structure of an error body from the ThrowableMapper */ @RequiredArgsConstructor public class ErrorBody { private final Throwable t; public String getMessage() { return t.getMessage(); } public String getType() { return t.getClass().getSimpleName(); } public List<String> getTypes() { return Types.getTypes(t, ClientException.class, RuntimeException.class, Exception.class, Throwable.class); } public ErrorBody getCause() { return t.getCause() == null ? null : new ErrorBody(t.getCause()); } @JsonUnwrapped public Object getAdditionalProperties() { if (t instanceof AdditionalProperties) { return ((AdditionalProperties)t).getAdditionalProperties(); } else { return null; } } } ## Instruction: Remove "Exception" from the error text ## Code After: package com.voodoodyne.gstrap.rest; import com.fasterxml.jackson.annotation.JsonUnwrapped; import com.voodoodyne.gstrap.lang.Types; import lombok.RequiredArgsConstructor; import java.util.List; import java.util.stream.Collectors; /** * JSON structure of an error body from the ThrowableMapper */ @RequiredArgsConstructor public class ErrorBody { private static final String stripExceptionSuffix(final String className) { if (className.endsWith("Exception")) { return className.substring(0, className.length() - "Exception".length()); } else { return className; } } private final Throwable t; public String getMessage() { return t.getMessage(); } public String getType() { return stripExceptionSuffix(t.getClass().getSimpleName()); } public List<String> getTypes() { return Types.getTypes(t, ClientException.class, RuntimeException.class, Exception.class, Throwable.class) .stream() .map(ErrorBody::stripExceptionSuffix).collect(Collectors.toList()); } public ErrorBody getCause() { return t.getCause() == null ? null : new ErrorBody(t.getCause()); } @JsonUnwrapped public Object getAdditionalProperties() { if (t instanceof AdditionalProperties) { return ((AdditionalProperties)t).getAdditionalProperties(); } else { return null; } } }
... import lombok.RequiredArgsConstructor; import java.util.List; import java.util.stream.Collectors; /** * JSON structure of an error body from the ThrowableMapper ... */ @RequiredArgsConstructor public class ErrorBody { private static final String stripExceptionSuffix(final String className) { if (className.endsWith("Exception")) { return className.substring(0, className.length() - "Exception".length()); } else { return className; } } private final Throwable t; public String getMessage() { ... } public String getType() { return stripExceptionSuffix(t.getClass().getSimpleName()); } public List<String> getTypes() { return Types.getTypes(t, ClientException.class, RuntimeException.class, Exception.class, Throwable.class) .stream() .map(ErrorBody::stripExceptionSuffix).collect(Collectors.toList()); } public ErrorBody getCause() { ...
a8c8b136f081e3a2c7f1fd1f833a85288a358e42
vumi_http_retry/workers/api/validate.py
vumi_http_retry/workers/api/validate.py
import json
from functools import wraps

from twisted.web import http
from jsonschema import Draft4Validator

from vumi_http_retry.workers.api.utils import response


def validate(*validators):
    def validator(fn):
        @wraps(fn)
        def wrapper(api, req, *a, **kw):
            errors = []

            for v in validators:
                errors.extend(v(req, *a, **kw) or [])

            if not errors:
                return fn(api, req, *a, **kw)
            else:
                return response(req, {'errors': errors}, code=http.BAD_REQUEST)

        return wrapper

    return validator


def has_header(name):
    def validator(req):
        if not req.requestHeaders.hasHeader(name):
            return [{
                'type': 'header_missing',
                'message': "Header '%s' is missing" % (name,)
            }]
        else:
            return []

    return validator


def body_schema(schema):
    json_validator = Draft4Validator(schema)

    def validator(req, body):
        return [{
            'type': 'invalid_body',
            'message': e.message
        } for e in json_validator.iter_errors(body)]

    return validator
import json
from functools import wraps

from twisted.web import http
from jsonschema import Draft4Validator

from vumi_http_retry.workers.api.utils import response


def validate(*validators):
    def validator(fn):
        @wraps(fn)
        def wrapper(api, req, *a, **kw):
            errors = []

            for v in validators:
                errors.extend(v(req, *a, **kw) or [])

            if not errors:
                return fn(api, req, *a, **kw)
            else:
                return response(req, {'errors': errors}, code=http.BAD_REQUEST)

        return wrapper

    return validator


def has_header(name):
    def validator(req, *a, **kw):
        if not req.requestHeaders.hasHeader(name):
            return [{
                'type': 'header_missing',
                'message': "Header '%s' is missing" % (name,)
            }]
        else:
            return []

    return validator


def body_schema(schema):
    json_validator = Draft4Validator(schema)

    def validator(req, body, *a, **kw):
        return [{
            'type': 'invalid_body',
            'message': e.message
        } for e in json_validator.iter_errors(body)]

    return validator
Change validators to allow additional arguments to be given to the functions they are wrapping
Change validators to allow additional arguments to be given to the functions they are wrapping
Python
bsd-3-clause
praekelt/vumi-http-retry-api,praekelt/vumi-http-retry-api
python
## Code Before: import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator ## Instruction: Change validators to allow additional arguments to be given to the functions they are wrapping ## Code After: import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req, *a, **kw): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body, *a, **kw): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
# ... existing code ... def has_header(name): def validator(req, *a, **kw): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', # ... modified code ... def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body, *a, **kw): return [{ 'type': 'invalid_body', 'message': e.message # ... rest of the code ...
ccbe4a1c48765fdd9e785392dff949bcc49192a2
setup.py
setup.py
from distutils.core import setup

setup(
    name='Zinc',
    version='0.1.7',
    author='John Wang',
    author_email='[email protected]',
    packages=['zinc'],
    package_dir={'zinc': ''},
    package_data={'zinc': ['examples/*.py', 'examples/*.json', 'README', 'zinc/*']},
    include_package_data=True,
    url='https://github.com/wangjohn/zinc_cli',
    license='LICENSE.txt',
    description='Wrapper for Zinc ecommerce API (zinc.io)',
    install_requires=[
        "requests >= 1.1.0"
    ],
)
from distutils.core import setup

setup(
    name='Zinc',
    version='0.1.8',
    author='John Wang',
    author_email='[email protected]',
    packages=['zinc'],
    package_dir={'zinc': ''},
    package_data={'zinc': ['examples/*.py', 'examples/*.json', 'zinc/*']},
    include_package_data=True,
    url='https://github.com/wangjohn/zinc_cli',
    license='LICENSE.txt',
    description='Wrapper for Zinc ecommerce API (zinc.io)',
    install_requires=[
        "requests >= 1.1.0"
    ],
)
Remove readme from package data.
Remove readme from package data.
Python
mit
wangjohn/zinc_cli
python
## Code Before: from distutils.core import setup setup( name='Zinc', version='0.1.7', author='John Wang', author_email='[email protected]', packages=['zinc'], package_dir={'zinc': ''}, package_data={'zinc': ['examples/*.py', 'examples/*.json', 'README', 'zinc/*']}, include_package_data=True, url='https://github.com/wangjohn/zinc_cli', license='LICENSE.txt', description='Wrapper for Zinc ecommerce API (zinc.io)', install_requires=[ "requests >= 1.1.0" ], ) ## Instruction: Remove readme from package data. ## Code After: from distutils.core import setup setup( name='Zinc', version='0.1.8', author='John Wang', author_email='[email protected]', packages=['zinc'], package_dir={'zinc': ''}, package_data={'zinc': ['examples/*.py', 'examples/*.json', 'zinc/*']}, include_package_data=True, url='https://github.com/wangjohn/zinc_cli', license='LICENSE.txt', description='Wrapper for Zinc ecommerce API (zinc.io)', install_requires=[ "requests >= 1.1.0" ], )
... setup( name='Zinc', version='0.1.8', author='John Wang', author_email='[email protected]', packages=['zinc'], package_dir={'zinc': ''}, package_data={'zinc': ['examples/*.py', 'examples/*.json', 'zinc/*']}, include_package_data=True, url='https://github.com/wangjohn/zinc_cli', license='LICENSE.txt', ...
8541ec09e237f1401095d31177bdde9ac1adaa39
util/linkJS.py
util/linkJS.py
import os

def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]):
    with open(target_fn, "wb") as target:
        target.write(prologue)

        # Add files listed in file_list_fn
        with open(file_list_fn) as file_list:
            for source_fn in file_list:
                source_fn = source_fn.replace("/", os.path.sep).strip()
                if len(source_fn) > 0 and source_fn[0] != "#":
                    addContents(source_base, source_fn, target)

        # Add all *.js files in module_dirs
        for module_base in module_dirs:
            for module_fn in os.listdir(module_base):
                if module_fn.endswith(".js"):
                    addContents(module_base, module_fn, target)

def addContents(source_base, source_fn, target):
    target.write("\n\n// " + source_fn + "\n\n")
    with open(os.path.join(source_base, source_fn)) as source:
        for line in source:
            target.write(line)
import os

def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]):
    with open(target_fn, "wb") as target:
        target.write(prologue)

        # Add files listed in file_list_fn
        with open(file_list_fn) as file_list:
            for source_fn in file_list:
                source_fn = source_fn.replace("/", os.path.sep).strip()
                if len(source_fn) > 0 and source_fn[0] != "#":
                    addContents(os.path.join(source_base, source_fn), target)

        # Add all *.js files in module_dirs
        for module_base in module_dirs:
            for module_fn in os.listdir(module_base):
                if module_fn.endswith(".js"):
                    addContents(os.path.join(module_base, module_fn), target)

def addContents(source_fn, target):
    target.write("\n\n// " + source_fn + "\n\n")
    with open(source_fn) as source:
        for line in source:
            target.write(line)
Include full path to original files
Include full path to original files
Python
mpl-2.0
MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz
python
## Code Before: import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(source_base, source_fn, target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(module_base, module_fn, target) def addContents(source_base, source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(os.path.join(source_base, source_fn)) as source: for line in source: target.write(line) ## Instruction: Include full path to original files ## Code After: import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(os.path.join(source_base, source_fn), target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(os.path.join(module_base, module_fn), target) def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(source_fn) as source: for line in source: target.write(line)
... for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(os.path.join(source_base, source_fn), target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(os.path.join(module_base, module_fn), target) def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(source_fn) as source: for line in source: target.write(line) ...
2e33218626a4a53a9336f22cc83e5bca1342562b
stroom-data/stroom-data-store-impl-fs/src/main/java/stroom/data/store/impl/fs/StreamTypeExtensions.java
stroom-data/stroom-data-store-impl-fs/src/main/java/stroom/data/store/impl/fs/StreamTypeExtensions.java
package stroom.data.store.impl.fs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; class StreamTypeExtensions { private static final Logger LOGGER = LoggerFactory.getLogger(StreamTypeExtensions.class); private static final Map<String, String> EXTENSION_MAP = new HashMap<>(); static { EXTENSION_MAP.put("Manifest", "mf"); EXTENSION_MAP.put("Raw Events", "revt"); EXTENSION_MAP.put("Raw Reference", "rref"); EXTENSION_MAP.put("Events", "evt"); EXTENSION_MAP.put("Reference", "ref"); EXTENSION_MAP.put("Test Events", "tevt"); EXTENSION_MAP.put("Test Reference", "tref"); EXTENSION_MAP.put("Segment Index", "seg"); EXTENSION_MAP.put("Boundary Index", "bdy"); EXTENSION_MAP.put("Meta Data", "meta"); EXTENSION_MAP.put("Error", "err"); EXTENSION_MAP.put("Context", "ctx"); } static String getExtension(final String streamType) { String extension = EXTENSION_MAP.get(streamType); if (extension == null) { LOGGER.warn("Unknown stream type '" + streamType + "' using extension 'dat'"); extension = "dat"; } return extension; } }
package stroom.data.store.impl.fs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; class StreamTypeExtensions { private static final Logger LOGGER = LoggerFactory.getLogger(StreamTypeExtensions.class); private static final Map<String, String> EXTENSION_MAP = new HashMap<>(); static { EXTENSION_MAP.put("Manifest", "mf"); EXTENSION_MAP.put("Raw Events", "revt"); EXTENSION_MAP.put("Raw Reference", "rref"); EXTENSION_MAP.put("Events", "evt"); EXTENSION_MAP.put("Reference", "ref"); EXTENSION_MAP.put("Test Events", "tevt"); EXTENSION_MAP.put("Test Reference", "tref"); EXTENSION_MAP.put("Segment Index", "seg"); EXTENSION_MAP.put("Boundary Index", "bdy"); EXTENSION_MAP.put("Meta Data", "meta"); EXTENSION_MAP.put("Error", "err"); EXTENSION_MAP.put("Context", "ctx"); EXTENSION_MAP.put("Detections", "dtxn"); } static String getExtension(final String streamType) { String extension = EXTENSION_MAP.get(streamType); if (extension == null) { LOGGER.warn("Unknown stream type '" + streamType + "' using extension 'dat'"); extension = "dat"; } return extension; } }
Introduce new Detections stream type
Introduce new Detections stream type
Java
apache-2.0
gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom
java
## Code Before: package stroom.data.store.impl.fs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; class StreamTypeExtensions { private static final Logger LOGGER = LoggerFactory.getLogger(StreamTypeExtensions.class); private static final Map<String, String> EXTENSION_MAP = new HashMap<>(); static { EXTENSION_MAP.put("Manifest", "mf"); EXTENSION_MAP.put("Raw Events", "revt"); EXTENSION_MAP.put("Raw Reference", "rref"); EXTENSION_MAP.put("Events", "evt"); EXTENSION_MAP.put("Reference", "ref"); EXTENSION_MAP.put("Test Events", "tevt"); EXTENSION_MAP.put("Test Reference", "tref"); EXTENSION_MAP.put("Segment Index", "seg"); EXTENSION_MAP.put("Boundary Index", "bdy"); EXTENSION_MAP.put("Meta Data", "meta"); EXTENSION_MAP.put("Error", "err"); EXTENSION_MAP.put("Context", "ctx"); } static String getExtension(final String streamType) { String extension = EXTENSION_MAP.get(streamType); if (extension == null) { LOGGER.warn("Unknown stream type '" + streamType + "' using extension 'dat'"); extension = "dat"; } return extension; } } ## Instruction: Introduce new Detections stream type ## Code After: package stroom.data.store.impl.fs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; class StreamTypeExtensions { private static final Logger LOGGER = LoggerFactory.getLogger(StreamTypeExtensions.class); private static final Map<String, String> EXTENSION_MAP = new HashMap<>(); static { EXTENSION_MAP.put("Manifest", "mf"); EXTENSION_MAP.put("Raw Events", "revt"); EXTENSION_MAP.put("Raw Reference", "rref"); EXTENSION_MAP.put("Events", "evt"); EXTENSION_MAP.put("Reference", "ref"); EXTENSION_MAP.put("Test Events", "tevt"); EXTENSION_MAP.put("Test Reference", "tref"); EXTENSION_MAP.put("Segment Index", "seg"); EXTENSION_MAP.put("Boundary Index", "bdy"); EXTENSION_MAP.put("Meta Data", "meta"); EXTENSION_MAP.put("Error", "err"); EXTENSION_MAP.put("Context", "ctx"); EXTENSION_MAP.put("Detections", "dtxn"); } static String getExtension(final String streamType) { String extension = EXTENSION_MAP.get(streamType); if (extension == null) { LOGGER.warn("Unknown stream type '" + streamType + "' using extension 'dat'"); extension = "dat"; } return extension; } }
... EXTENSION_MAP.put("Meta Data", "meta"); EXTENSION_MAP.put("Error", "err"); EXTENSION_MAP.put("Context", "ctx"); EXTENSION_MAP.put("Detections", "dtxn"); } static String getExtension(final String streamType) { ...
67d067fe499ba2ec78d34083640a4bfe9835d62b
tests/test_sequence.py
tests/test_sequence.py
from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 self.seq[2] assert self.seq[2] == 3 def test_len(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter(self): for _ in range(2): assert list(self.seq) == [1, 2, 3]
from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 assert self.seq[2] == 3 def test_getitem_raises_indexerror(self): self.assertRaises(IndexError, lambda: self.seq[3]) def test_len_returns_current_size(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter_preserves_elems(self): for _ in range(2): assert list(self.seq) == [1, 2, 3]
Test that IndexError is raised when appropriate
Test that IndexError is raised when appropriate
Python
mit
eugene-eeo/prudent
python
## Code Before: from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 self.seq[2] assert self.seq[2] == 3 def test_len(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter(self): for _ in range(2): assert list(self.seq) == [1, 2, 3] ## Instruction: Test that IndexError is raised when appropriate ## Code After: from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 assert self.seq[2] == 3 def test_getitem_raises_indexerror(self): self.assertRaises(IndexError, lambda: self.seq[3]) def test_len_returns_current_size(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter_preserves_elems(self): for _ in range(2): assert list(self.seq) == [1, 2, 3]
# ... existing code ... def test_getitem(self): assert self.seq[0] == 1 assert self.seq[2] == 3 def test_getitem_raises_indexerror(self): self.assertRaises(IndexError, lambda: self.seq[3]) def test_len_returns_current_size(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter_preserves_elems(self): for _ in range(2): assert list(self.seq) == [1, 2, 3] # ... rest of the code ...
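In the new test above, the lambda is what defers the out-of-range indexing until assertRaises can catch it; indexing self.seq[3] directly in the argument list would raise before the assertion ever ran. A small self-contained sketch of the two equivalent unittest styles, using a plain list rather than the Sequence class:

```python
import unittest


class IndexErrorStyles(unittest.TestCase):
    def test_callable_form(self):
        # assertRaises invokes the callable itself, so the exception is caught
        self.assertRaises(IndexError, lambda: [1, 2, 3][5])

    def test_context_manager_form(self):
        # equivalent, and often clearer when the failing code spans several lines
        with self.assertRaises(IndexError):
            [1, 2, 3][5]


if __name__ == "__main__":
    unittest.main()
```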
cd0950333f5731c2e9f4ba6947800b70ecf33b54
src/main/java/seedu/address/logic/parser/CliSyntax.java
src/main/java/seedu/address/logic/parser/CliSyntax.java
package seedu.address.logic.parser; import java.util.regex.Pattern; import seedu.address.logic.parser.ArgumentTokenizer.Prefix; /** * Contains Command Line Interface (CLI) syntax definitions common to multiple commands */ public class CliSyntax { /* Prefix definitions */ public static final Prefix PREFIX_BOOK_DATE = new Prefix("on "); public static final Prefix PREFIX_BOOK_DATE_DELIMITER = new Prefix(","); public static final Prefix PREFIX_DEADLINE = new Prefix("by "); public static final Prefix PREFIX_TIMEINTERVAL_END = new Prefix("to "); public static final Prefix PREFIX_TIMEINTERVAL_START = new Prefix("from "); public static final Prefix PREFIX_LABEL = new Prefix("#"); public static final Prefix PREFIX_STATUS_COMPLETED = new Prefix("COMPLETED"); public static final Prefix PREFIX_STATUS_INCOMPLETE = new Prefix("INCOMPLETE"); /* Patterns definitions */ public static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace }
package seedu.address.logic.parser; import java.util.regex.Pattern; import seedu.address.logic.parser.ArgumentTokenizer.Prefix; /** * Contains Command Line Interface (CLI) syntax definitions common to multiple commands */ public class CliSyntax { /* Prefix definitions */ public static final Prefix PREFIX_BOOK_DATE = new Prefix("on "); public static final Prefix PREFIX_BOOK_DATE_DELIMITER = new Prefix(","); public static final Prefix PREFIX_DEADLINE = new Prefix("by "); public static final Prefix PREFIX_TIMEINTERVAL_END = new Prefix("to "); public static final Prefix PREFIX_TIMEINTERVAL_START = new Prefix("from "); public static final Prefix PREFIX_LABEL = new Prefix("#"); public static final Prefix PREFIX_STATUS_COMPLETED = new Prefix("completed"); public static final Prefix PREFIX_STATUS_INCOMPLETE = new Prefix("incomplete"); /* Patterns definitions */ public static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace }
Change keyword from CAPS to small letter
Change keyword from CAPS to small letter
Java
mit
CS2103JAN2017-T16-B2/main,CS2103JAN2017-T16-B2/main
java
## Code Before: package seedu.address.logic.parser; import java.util.regex.Pattern; import seedu.address.logic.parser.ArgumentTokenizer.Prefix; /** * Contains Command Line Interface (CLI) syntax definitions common to multiple commands */ public class CliSyntax { /* Prefix definitions */ public static final Prefix PREFIX_BOOK_DATE = new Prefix("on "); public static final Prefix PREFIX_BOOK_DATE_DELIMITER = new Prefix(","); public static final Prefix PREFIX_DEADLINE = new Prefix("by "); public static final Prefix PREFIX_TIMEINTERVAL_END = new Prefix("to "); public static final Prefix PREFIX_TIMEINTERVAL_START = new Prefix("from "); public static final Prefix PREFIX_LABEL = new Prefix("#"); public static final Prefix PREFIX_STATUS_COMPLETED = new Prefix("COMPLETED"); public static final Prefix PREFIX_STATUS_INCOMPLETE = new Prefix("INCOMPLETE"); /* Patterns definitions */ public static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace } ## Instruction: Change keyword from CAPS to small letter ## Code After: package seedu.address.logic.parser; import java.util.regex.Pattern; import seedu.address.logic.parser.ArgumentTokenizer.Prefix; /** * Contains Command Line Interface (CLI) syntax definitions common to multiple commands */ public class CliSyntax { /* Prefix definitions */ public static final Prefix PREFIX_BOOK_DATE = new Prefix("on "); public static final Prefix PREFIX_BOOK_DATE_DELIMITER = new Prefix(","); public static final Prefix PREFIX_DEADLINE = new Prefix("by "); public static final Prefix PREFIX_TIMEINTERVAL_END = new Prefix("to "); public static final Prefix PREFIX_TIMEINTERVAL_START = new Prefix("from "); public static final Prefix PREFIX_LABEL = new Prefix("#"); public static final Prefix PREFIX_STATUS_COMPLETED = new Prefix("completed"); public static final Prefix PREFIX_STATUS_INCOMPLETE = new Prefix("incomplete"); /* Patterns definitions */ public static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace }
... public static final Prefix PREFIX_TIMEINTERVAL_END = new Prefix("to "); public static final Prefix PREFIX_TIMEINTERVAL_START = new Prefix("from "); public static final Prefix PREFIX_LABEL = new Prefix("#"); public static final Prefix PREFIX_STATUS_COMPLETED = new Prefix("completed"); public static final Prefix PREFIX_STATUS_INCOMPLETE = new Prefix("incomplete"); /* Patterns definitions */ public static final Pattern KEYWORDS_ARGS_FORMAT = ...
dfdd4182c477f0d49fe33d99f339bcd5ed7fb6bd
src/test/java/net/floodlightcontroller/linkdiscovery/internal/TestableLinkStorageImpl.java
src/test/java/net/floodlightcontroller/linkdiscovery/internal/TestableLinkStorageImpl.java
package net.floodlightcontroller.linkdiscovery.internal; import java.util.Set; import com.thinkaurelius.titan.core.TitanGraph; import com.tinkerpop.blueprints.TransactionalGraph.Conclusion; import com.tinkerpop.blueprints.Vertex; /** * Seam that allows me to set up a testable instance of LinkStorageImpl that * writes to a file database rather than a Cassandra cluster. * It seems the init() API on LinkStorageImpl might change so I won't rely * on it yet. * * @author jono * */ public class TestableLinkStorageImpl extends LinkStorageImpl { public TestableLinkStorageImpl(TitanGraph graph){ this.graph = graph; } @Override public void init(String conf){ Set<String> s = graph.getIndexedKeys(Vertex.class); if (!s.contains("dpid")) { graph.createKeyIndex("dpid", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } if (!s.contains("type")) { graph.createKeyIndex("type", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } } }
package net.floodlightcontroller.linkdiscovery.internal; import java.util.Set; import com.thinkaurelius.titan.core.TitanGraph; import com.tinkerpop.blueprints.TransactionalGraph.Conclusion; import com.tinkerpop.blueprints.Vertex; /** * Seam that allows me to set up a testable instance of LinkStorageImpl that * writes to a file database rather than a Cassandra cluster. * It seems the init() API on LinkStorageImpl might change so I won't rely * on it yet. * * @author jono * */ public class TestableLinkStorageImpl extends LinkStorageImpl { protected TitanGraph graph; public TestableLinkStorageImpl(TitanGraph graph){ this.graph = graph; } @Override public void init(String conf){ Set<String> s = graph.getIndexedKeys(Vertex.class); if (!s.contains("dpid")) { graph.createKeyIndex("dpid", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } if (!s.contains("type")) { graph.createKeyIndex("type", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } } }
Fix a test case failing to build.
Fix a test case failing to build.
Java
apache-2.0
opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open
java
## Code Before: package net.floodlightcontroller.linkdiscovery.internal; import java.util.Set; import com.thinkaurelius.titan.core.TitanGraph; import com.tinkerpop.blueprints.TransactionalGraph.Conclusion; import com.tinkerpop.blueprints.Vertex; /** * Seam that allows me to set up a testable instance of LinkStorageImpl that * writes to a file database rather than a Cassandra cluster. * It seems the init() API on LinkStorageImpl might change so I won't rely * on it yet. * * @author jono * */ public class TestableLinkStorageImpl extends LinkStorageImpl { public TestableLinkStorageImpl(TitanGraph graph){ this.graph = graph; } @Override public void init(String conf){ Set<String> s = graph.getIndexedKeys(Vertex.class); if (!s.contains("dpid")) { graph.createKeyIndex("dpid", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } if (!s.contains("type")) { graph.createKeyIndex("type", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } } } ## Instruction: Fix a test case failing to build. ## Code After: package net.floodlightcontroller.linkdiscovery.internal; import java.util.Set; import com.thinkaurelius.titan.core.TitanGraph; import com.tinkerpop.blueprints.TransactionalGraph.Conclusion; import com.tinkerpop.blueprints.Vertex; /** * Seam that allows me to set up a testable instance of LinkStorageImpl that * writes to a file database rather than a Cassandra cluster. * It seems the init() API on LinkStorageImpl might change so I won't rely * on it yet. * * @author jono * */ public class TestableLinkStorageImpl extends LinkStorageImpl { protected TitanGraph graph; public TestableLinkStorageImpl(TitanGraph graph){ this.graph = graph; } @Override public void init(String conf){ Set<String> s = graph.getIndexedKeys(Vertex.class); if (!s.contains("dpid")) { graph.createKeyIndex("dpid", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } if (!s.contains("type")) { graph.createKeyIndex("type", Vertex.class); graph.stopTransaction(Conclusion.SUCCESS); } } }
# ... existing code ... */ public class TestableLinkStorageImpl extends LinkStorageImpl { protected TitanGraph graph; public TestableLinkStorageImpl(TitanGraph graph){ this.graph = graph; # ... rest of the code ...
94861438189537b88deaf8d04cc9942192038d8c
user_messages/views.py
user_messages/views.py
from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
Update the read status of a thread when it's viewed
Update the read status of a thread when it's viewed
Python
mit
eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,arthur-wsw/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages
python
## Code Before: from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request)) ## Instruction: Update the read status of a thread when it's viewed ## Code After: from django.contrib.auth.decorators import login_required from django.db.models import Q from django.shortcuts import get_object_or_404 from django.template import RequestContext from user_messages.models import Thread, Message @login_required def inbox(request, template_name='user_messages/inbox.html'): threads = list(Thread.objects.inbox(request.user)) threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True) return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request)) @login_required def thread_detail(request, thread_id, template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
# ... existing code ... template_name='user_messages/thread_detail.html'): qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user)) thread = get_object_or_404(qs, pk=thread_id) if request.user == thread.to_user: thread.to_user_unread = False else: thread.from_user_unread = False thread.save() return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request)) # ... rest of the code ...
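A design note on the view change above: thread.save() rewrites every column of the row. On Django 1.5 and later the same logic can limit the UPDATE to the one flag that changed; a sketch under that assumption, with the field names taken from the patch and the rest of the view omitted:

```python
def mark_thread_read(thread, user):
    """Clear the unread flag for whichever side of the thread `user` is on."""
    if user == thread.to_user:
        thread.to_user_unread = False
        thread.save(update_fields=["to_user_unread"])
    else:
        thread.from_user_unread = False
        thread.save(update_fields=["from_user_unread"])
```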
f745ce828a0949b63b4e83e13ac8106273d0a162
setup.py
setup.py
import setuptools with open('VERSION') as f: version = f.read().strip() with open('README.md') as f: readme = f.read() setuptools.setup( name='alerta', version=version, description='Alerta unified command-line tool and SDK', long_description=readme, url='http://github.com/alerta/python-alerta', license='MIT', author='Nick Satterly', author_email='[email protected]', packages=setuptools.find_packages(exclude=['tests']), install_requires=[ 'Click', 'requests', 'tabulate', 'pytz', 'six' ], include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'alerta = alertaclient.cli:cli' ] }, keywords='alerta client unified command line tool sdk', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Intended Audience :: Telecommunications Industry', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Monitoring', ], python_requires='>=3.5' )
import os import setuptools def read(filename): return open(os.path.join(os.path.dirname(__file__), filename)).read() setuptools.setup( name='alerta', version=read('VERSION'), description='Alerta unified command-line tool and SDK', long_description=read('README.md'), long_description_content_type='text/markdown', license='Apache License 2.0', author='Nick Satterly', author_email='[email protected]', packages=setuptools.find_packages(exclude=['tests']), install_requires=[ 'Click', 'requests', 'tabulate', 'pytz', 'six' ], include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'alerta = alertaclient.cli:cli' ] }, keywords='alerta client unified command line tool sdk', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Intended Audience :: Telecommunications Industry', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Monitoring', 'Topic :: Software Development :: Libraries :: Python Modules' ], python_requires='>=3.5' )
Use markdown as README format
Use markdown as README format
Python
apache-2.0
alerta/python-alerta-client,alerta/python-alerta,alerta/python-alerta-client
python
## Code Before: import setuptools with open('VERSION') as f: version = f.read().strip() with open('README.md') as f: readme = f.read() setuptools.setup( name='alerta', version=version, description='Alerta unified command-line tool and SDK', long_description=readme, url='http://github.com/alerta/python-alerta', license='MIT', author='Nick Satterly', author_email='[email protected]', packages=setuptools.find_packages(exclude=['tests']), install_requires=[ 'Click', 'requests', 'tabulate', 'pytz', 'six' ], include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'alerta = alertaclient.cli:cli' ] }, keywords='alerta client unified command line tool sdk', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Intended Audience :: Telecommunications Industry', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Monitoring', ], python_requires='>=3.5' ) ## Instruction: Use markdown as README format ## Code After: import os import setuptools def read(filename): return open(os.path.join(os.path.dirname(__file__), filename)).read() setuptools.setup( name='alerta', version=read('VERSION'), description='Alerta unified command-line tool and SDK', long_description=read('README.md'), long_description_content_type='text/markdown', license='Apache License 2.0', author='Nick Satterly', author_email='[email protected]', packages=setuptools.find_packages(exclude=['tests']), install_requires=[ 'Click', 'requests', 'tabulate', 'pytz', 'six' ], include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'alerta = alertaclient.cli:cli' ] }, keywords='alerta client unified command line tool sdk', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Intended Audience :: Telecommunications Industry', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Monitoring', 'Topic :: Software Development :: Libraries :: Python Modules' ], python_requires='>=3.5' )
// ... existing code ... import os import setuptools def read(filename): return open(os.path.join(os.path.dirname(__file__), filename)).read() setuptools.setup( name='alerta', version=read('VERSION'), description='Alerta unified command-line tool and SDK', long_description=read('README.md'), long_description_content_type='text/markdown', license='Apache License 2.0', author='Nick Satterly', author_email='[email protected]', packages=setuptools.find_packages(exclude=['tests']), install_requires=[ 'Click', // ... modified code ... keywords='alerta client unified command line tool sdk', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Intended Audience :: Telecommunications Industry', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Monitoring', 'Topic :: Software Development :: Libraries :: Python Modules' ], python_requires='>=3.5' ) // ... rest of the code ...
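The long_description_content_type line is the part of the patch that actually fixes the rendering: when it is absent, PyPI falls back to treating the description as reStructuredText, which is why a Markdown README displays poorly. A stripped-down sketch of just that metadata; the package name is a placeholder, not the real project:

```python
import setuptools

setuptools.setup(
    name="example-package",                        # placeholder name
    version="0.0.1",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
)
```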
a52a248ec8d38168dd15989ec6daacad1d43bdbb
test/Driver/amdgpu-toolchain.c
test/Driver/amdgpu-toolchain.c
// RUN: %clang -### -target amdgcn--amdhsa -x assembler -mcpu=kaveri %s 2>&1 | FileCheck -check-prefix=AS_LINK %s // AS_LINK-LABEL: clang // AS_LINK: "-cc1as" // AS_LINK-LABEL: lld // AS_LINK: "-flavor" "gnu" "-target" "amdgcn--amdhsa" // REQUIRES: clang-driver
// RUN: %clang -### -target amdgcn--amdhsa -x assembler -mcpu=kaveri %s 2>&1 | FileCheck -check-prefix=AS_LINK %s // AS_LINK: /clang // AS_LINK-SAME: "-cc1as" // AS_LINK: /lld // AS_LINK-SAME: "-flavor" "gnu" "-target" "amdgcn--amdhsa"
Fix test to pass when the directory name has lld in it.
Fix test to pass when the directory name has lld in it. CHECK-LABEL assumes that there is only one occurrence of the match. The output looks like: clang version 3.8.0 (trunk 247999) .... /path/to/build/dir/bin/clang-3.8 .... If the path contains lld, the second CHECK-LABEL matches it and we fail since there is no -cc1as between clang and lld. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@248029 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang
c
## Code Before: // RUN: %clang -### -target amdgcn--amdhsa -x assembler -mcpu=kaveri %s 2>&1 | FileCheck -check-prefix=AS_LINK %s // AS_LINK-LABEL: clang // AS_LINK: "-cc1as" // AS_LINK-LABEL: lld // AS_LINK: "-flavor" "gnu" "-target" "amdgcn--amdhsa" // REQUIRES: clang-driver ## Instruction: Fix test to pass when the directory name has lld in it. CHECK-LABEL assumes that there is only one occurrence of the match. The output looks like: clang version 3.8.0 (trunk 247999) .... /path/to/build/dir/bin/clang-3.8 .... If the path contains lld, the second CHECK-LABEL matches it and we fail since there is no -cc1as between clang and lld. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@248029 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: // RUN: %clang -### -target amdgcn--amdhsa -x assembler -mcpu=kaveri %s 2>&1 | FileCheck -check-prefix=AS_LINK %s // AS_LINK: /clang // AS_LINK-SAME: "-cc1as" // AS_LINK: /lld // AS_LINK-SAME: "-flavor" "gnu" "-target" "amdgcn--amdhsa"
// ... existing code ... // RUN: %clang -### -target amdgcn--amdhsa -x assembler -mcpu=kaveri %s 2>&1 | FileCheck -check-prefix=AS_LINK %s // AS_LINK: /clang // AS_LINK-SAME: "-cc1as" // AS_LINK: /lld // AS_LINK-SAME: "-flavor" "gnu" "-target" "amdgcn--amdhsa" // ... rest of the code ...
b54a6353a746d54869a7cadca1bdcfb1e1cd3d51
moviemanager/models.py
moviemanager/models.py
from django.db import models class Movie(models.Model): tmdb_id = models.IntegerField()
from django.contrib.auth.models import User from django.db import models class Movie(models.Model): tmdb_id = models.IntegerField() score = models.IntegerField() submitter = models.ForeignKey(User)
Add some extra fields to movie model
Add some extra fields to movie model
Python
mit
simon-andrews/movieman2,simon-andrews/movieman2
python
## Code Before: from django.db import models class Movie(models.Model): tmdb_id = models.IntegerField() ## Instruction: Add some extra fields to movie model ## Code After: from django.contrib.auth.models import User from django.db import models class Movie(models.Model): tmdb_id = models.IntegerField() score = models.IntegerField() submitter = models.ForeignKey(User)
... from django.contrib.auth.models import User from django.db import models class Movie(models.Model): tmdb_id = models.IntegerField() score = models.IntegerField() submitter = models.ForeignKey(User) ...
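The models.ForeignKey(User) in the new model relies on the pre-Django-2.0 default of cascading deletes; from Django 2.0 onward the on_delete argument is required, so the equivalent model reads as below (same fields as the patch):

```python
from django.contrib.auth.models import User
from django.db import models


class Movie(models.Model):
    tmdb_id = models.IntegerField()
    score = models.IntegerField()
    # on_delete must be spelled out explicitly on Django >= 2.0
    submitter = models.ForeignKey(User, on_delete=models.CASCADE)
```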
9f1d4788c5f3751b978da97434b5f6c2e22105b5
django_inbound_email/__init__.py
django_inbound_email/__init__.py
"""An inbound email handler for Django.""" __title__ = 'django-inbound-email' __version__ = '0.3.3' __author__ = 'YunoJuno Ltd' __license__ = 'MIT' __copyright__ = 'Copyright 2014 YunoJuno' __description__ = ( "A Django app to make it easy to receive inbound emails from " "a hosted transactional email service (e.g. SendGrid, Postmark, " "Mandrill, etc.)." )
"""An inbound email handler for Django.""" __title__ = 'django-inbound-email' __version__ = '0.3.3' __author__ = 'YunoJuno Ltd' __license__ = 'MIT' __copyright__ = 'Copyright 2014 YunoJuno' __description__ = 'A Django app for receiving inbound emails.'
Update package description so it displays correctly on PyPI.
Update package description so it displays correctly on PyPI. The description was wrapping, so it appeared with a single '(' character on PyPI. I've updated it so that it's now all on a single line.
Python
mit
yunojuno/django-inbound-email
python
## Code Before: """An inbound email handler for Django.""" __title__ = 'django-inbound-email' __version__ = '0.3.3' __author__ = 'YunoJuno Ltd' __license__ = 'MIT' __copyright__ = 'Copyright 2014 YunoJuno' __description__ = ( "A Django app to make it easy to receive inbound emails from " "a hosted transactional email service (e.g. SendGrid, Postmark, " "Mandrill, etc.)." ) ## Instruction: Update package description so it displays correctly on PyPI. The description was wrapping, so it appeared with a single '(' character on PyPI. I've updated it so that it's now all on a single line. ## Code After: """An inbound email handler for Django.""" __title__ = 'django-inbound-email' __version__ = '0.3.3' __author__ = 'YunoJuno Ltd' __license__ = 'MIT' __copyright__ = 'Copyright 2014 YunoJuno' __description__ = 'A Django app for receiving inbound emails.'
// ... existing code ... __author__ = 'YunoJuno Ltd' __license__ = 'MIT' __copyright__ = 'Copyright 2014 YunoJuno' __description__ = 'A Django app for receiving inbound emails.' // ... rest of the code ...
6ed56af7469f074ffc5779d92a75a0cfd5258977
src/test/java/info/u_team/u_team_test/init/TestBlocks.java
src/test/java/info/u_team/u_team_test/init/TestBlocks.java
package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.*; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.block.*; import net.minecraft.item.BlockItem; import net.minecraftforge.eventbus.api.IEventBus; public class TestBlocks { public static final BlockDeferredRegister BLOCKS = BlockDeferredRegister.create(TestMod.MODID); public static final BlockRegistryObject<BasicBlock, BlockItem> BASIC = BLOCKS.register("basicblock", () -> new BasicBlock("basicblock")); public static final BlockRegistryObject<BasicTileEntityBlock, BlockItem> BASIC_TILEENTITY = BLOCKS.register("tileentity", () -> new BasicTileEntityBlock("tileentity")); public static final BlockRegistryObject<BasicEnergyCreatorBlock, BlockItem> BASIC_ENERGY_CREATOR = BLOCKS.register("energy_creator", () -> new BasicEnergyCreatorBlock("energy_creator")); public static final BlockRegistryObject<BasicFluidInventoryBlock, BlockItem> BASIC_FLUID_INVENTORY = BLOCKS.register("fluid_inventory", () -> new BasicFluidInventoryBlock("fluid_inventory")); public static void register(IEventBus bus) { BLOCKS.register(bus); } }
package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.*; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.block.*; import net.minecraft.item.BlockItem; import net.minecraftforge.eventbus.api.IEventBus; public class TestBlocks { public static final BlockDeferredRegister BLOCKS = BlockDeferredRegister.create(TestMod.MODID); public static final BlockRegistryObject<BasicBlock, BlockItem> BASIC = BLOCKS.register("basicblock", BasicBlock::new); public static final BlockRegistryObject<BasicTileEntityBlock, BlockItem> BASIC_TILEENTITY = BLOCKS.register("tileentity", BasicTileEntityBlock::new); public static final BlockRegistryObject<BasicEnergyCreatorBlock, BlockItem> BASIC_ENERGY_CREATOR = BLOCKS.register("energy_creator", BasicEnergyCreatorBlock::new); public static final BlockRegistryObject<BasicFluidInventoryBlock, BlockItem> BASIC_FLUID_INVENTORY = BLOCKS.register("fluid_inventory", BasicFluidInventoryBlock::new); public static void register(IEventBus bus) { BLOCKS.register(bus); } }
Update test block with new method references
Update test block with new method references
Java
apache-2.0
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
java
## Code Before: package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.*; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.block.*; import net.minecraft.item.BlockItem; import net.minecraftforge.eventbus.api.IEventBus; public class TestBlocks { public static final BlockDeferredRegister BLOCKS = BlockDeferredRegister.create(TestMod.MODID); public static final BlockRegistryObject<BasicBlock, BlockItem> BASIC = BLOCKS.register("basicblock", () -> new BasicBlock("basicblock")); public static final BlockRegistryObject<BasicTileEntityBlock, BlockItem> BASIC_TILEENTITY = BLOCKS.register("tileentity", () -> new BasicTileEntityBlock("tileentity")); public static final BlockRegistryObject<BasicEnergyCreatorBlock, BlockItem> BASIC_ENERGY_CREATOR = BLOCKS.register("energy_creator", () -> new BasicEnergyCreatorBlock("energy_creator")); public static final BlockRegistryObject<BasicFluidInventoryBlock, BlockItem> BASIC_FLUID_INVENTORY = BLOCKS.register("fluid_inventory", () -> new BasicFluidInventoryBlock("fluid_inventory")); public static void register(IEventBus bus) { BLOCKS.register(bus); } } ## Instruction: Update test block with new method references ## Code After: package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.*; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.block.*; import net.minecraft.item.BlockItem; import net.minecraftforge.eventbus.api.IEventBus; public class TestBlocks { public static final BlockDeferredRegister BLOCKS = BlockDeferredRegister.create(TestMod.MODID); public static final BlockRegistryObject<BasicBlock, BlockItem> BASIC = BLOCKS.register("basicblock", BasicBlock::new); public static final BlockRegistryObject<BasicTileEntityBlock, BlockItem> BASIC_TILEENTITY = BLOCKS.register("tileentity", BasicTileEntityBlock::new); public static final BlockRegistryObject<BasicEnergyCreatorBlock, BlockItem> BASIC_ENERGY_CREATOR = BLOCKS.register("energy_creator", BasicEnergyCreatorBlock::new); public static final BlockRegistryObject<BasicFluidInventoryBlock, BlockItem> BASIC_FLUID_INVENTORY = BLOCKS.register("fluid_inventory", BasicFluidInventoryBlock::new); public static void register(IEventBus bus) { BLOCKS.register(bus); } }
// ... existing code ... public static final BlockDeferredRegister BLOCKS = BlockDeferredRegister.create(TestMod.MODID); public static final BlockRegistryObject<BasicBlock, BlockItem> BASIC = BLOCKS.register("basicblock", BasicBlock::new); public static final BlockRegistryObject<BasicTileEntityBlock, BlockItem> BASIC_TILEENTITY = BLOCKS.register("tileentity", BasicTileEntityBlock::new); public static final BlockRegistryObject<BasicEnergyCreatorBlock, BlockItem> BASIC_ENERGY_CREATOR = BLOCKS.register("energy_creator", BasicEnergyCreatorBlock::new); public static final BlockRegistryObject<BasicFluidInventoryBlock, BlockItem> BASIC_FLUID_INVENTORY = BLOCKS.register("fluid_inventory", BasicFluidInventoryBlock::new); public static void register(IEventBus bus) { BLOCKS.register(bus); // ... rest of the code ...
abaa882aaa1b7e251d989d60391bd2e06801c2a2
py/desiUtil/install/most_recent_tag.py
py/desiUtil/install/most_recent_tag.py
from __future__ import absolute_import, division, print_function, unicode_literals # The line above will help with 2to3 support. def most_recent_tag(tags,username=None): """Scan an SVN tags directory and return the most recent tag. Parameters ---------- tags : str A URL pointing to an SVN tags directory. username : str, optional If set, pass the value to SVN's ``--username`` option. Returns ------- most_recent_tag : str The most recent tag found in ``tags``. """ from subprocess import Popen, PIPE command = ['svn'] if username is not None: command += ['--username', username] command += ['ls',tags] proc = Popen(command,stdout=PIPE,stderr=PIPE) out, err = proc.communicate() try: mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1] except IndexError: mrt = '0.0.0' return mrt
from __future__ import absolute_import, division, print_function, unicode_literals # The line above will help with 2to3 support. def most_recent_tag(tags,username=None): """Scan an SVN tags directory and return the most recent tag. Parameters ---------- tags : str A URL pointing to an SVN tags directory. username : str, optional If set, pass the value to SVN's ``--username`` option. Returns ------- most_recent_tag : str The most recent tag found in ``tags``. """ from distutils.version import StrictVersion as V from subprocess import Popen, PIPE command = ['svn'] if username is not None: command += ['--username', username] command += ['ls',tags] proc = Popen(command,stdout=PIPE,stderr=PIPE) out, err = proc.communicate() try: mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0], key=lambda x: V(x))[-1] except IndexError: mrt = '0.0.0' return mrt
Add more careful version checks
Add more careful version checks
Python
bsd-3-clause
desihub/desiutil,desihub/desiutil
python
## Code Before: from __future__ import absolute_import, division, print_function, unicode_literals # The line above will help with 2to3 support. def most_recent_tag(tags,username=None): """Scan an SVN tags directory and return the most recent tag. Parameters ---------- tags : str A URL pointing to an SVN tags directory. username : str, optional If set, pass the value to SVN's ``--username`` option. Returns ------- most_recent_tag : str The most recent tag found in ``tags``. """ from subprocess import Popen, PIPE command = ['svn'] if username is not None: command += ['--username', username] command += ['ls',tags] proc = Popen(command,stdout=PIPE,stderr=PIPE) out, err = proc.communicate() try: mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1] except IndexError: mrt = '0.0.0' return mrt ## Instruction: Add more careful version checks ## Code After: from __future__ import absolute_import, division, print_function, unicode_literals # The line above will help with 2to3 support. def most_recent_tag(tags,username=None): """Scan an SVN tags directory and return the most recent tag. Parameters ---------- tags : str A URL pointing to an SVN tags directory. username : str, optional If set, pass the value to SVN's ``--username`` option. Returns ------- most_recent_tag : str The most recent tag found in ``tags``. """ from distutils.version import StrictVersion as V from subprocess import Popen, PIPE command = ['svn'] if username is not None: command += ['--username', username] command += ['ls',tags] proc = Popen(command,stdout=PIPE,stderr=PIPE) out, err = proc.communicate() try: mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0], key=lambda x: V(x))[-1] except IndexError: mrt = '0.0.0' return mrt
... most_recent_tag : str The most recent tag found in ``tags``. """ from distutils.version import StrictVersion as V from subprocess import Popen, PIPE command = ['svn'] if username is not None: ... proc = Popen(command,stdout=PIPE,stderr=PIPE) out, err = proc.communicate() try: mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0], key=lambda x: V(x))[-1] except IndexError: mrt = '0.0.0' return mrt ...
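The key=lambda x: V(x) in the new sorted() call is what makes the check "more careful": a plain string sort orders tag names lexicographically, so a two-digit version component sorts before a one-digit one. A short sketch of the difference; note that distutils (and with it StrictVersion) is deprecated on current Python, where packaging.version.Version is the usual replacement:

```python
from distutils.version import StrictVersion

tags = ["1.9.0", "1.10.0", "1.2.3"]

print(sorted(tags)[-1])                     # '1.9.0'  (lexicographic, wrong)
print(sorted(tags, key=StrictVersion)[-1])  # '1.10.0' (version-aware, right)
```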
6ad9eb749dcae5a1b2e3d5a3b4cd783c9c8c7224
cqrs/artifact_view.h
cqrs/artifact_view.h
namespace cddd { namespace cqrs { template<class> class basic_artifact_view; template<class DomainEventDispatcher, class DomainEventContainer> class basic_artifact_view<basic_artifact<DomainEventDispatcher, DomainEventContainer>> { public: using id_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::id_type; using size_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::size_type; const id_type &id() const { return artifact_.id(); } size_type revision() const { return artifact_.revision(); } template<class Evt> inline void apply_change(Evt &&e) { using std::forward; artifact_.apply_change(forward<Evt>(e)); } protected: explicit inline basic_artifact_view(basic_artifact<DomainEventDispatcher, DomainEventContainer> &a) : artifact_{a} { } template<class Fun> void add_handler(Fun f) { using std::move; artifact_.add_handler(move(f)); } private: basic_artifact<DomainEventDispatcher, DomainEventContainer> &artifact_; }; typedef basic_artifact_view<artifact> artifact_view; } }
namespace cddd { namespace cqrs { template<class> class basic_artifact_view; template<class DomainEventDispatcher, class DomainEventContainer> class basic_artifact_view<basic_artifact<DomainEventDispatcher, DomainEventContainer>> { public: using id_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::id_type; using size_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::size_type; const id_type &id() const { return artifact_.id(); } size_type revision() const { return artifact_.revision(); } template<class Evt> inline auto apply_change(Evt &&e) { using std::forward; return artifact_.apply_change(forward<Evt>(e)); } protected: explicit inline basic_artifact_view(basic_artifact<DomainEventDispatcher, DomainEventContainer> &a) : artifact_{a} { } template<class Fun> void add_handler(Fun f) { using std::move; artifact_.add_handler(move(f)); } private: basic_artifact<DomainEventDispatcher, DomainEventContainer> &artifact_; }; typedef basic_artifact_view<artifact> artifact_view; } }
Refactor artifact view to provide event pointer created by the artifact after applying a change
Refactor artifact view to provide event pointer created by the artifact after applying a change
C
mit
skizzay/cddd,skizzay/cddd
c
## Code Before: namespace cddd { namespace cqrs { template<class> class basic_artifact_view; template<class DomainEventDispatcher, class DomainEventContainer> class basic_artifact_view<basic_artifact<DomainEventDispatcher, DomainEventContainer>> { public: using id_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::id_type; using size_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::size_type; const id_type &id() const { return artifact_.id(); } size_type revision() const { return artifact_.revision(); } template<class Evt> inline void apply_change(Evt &&e) { using std::forward; artifact_.apply_change(forward<Evt>(e)); } protected: explicit inline basic_artifact_view(basic_artifact<DomainEventDispatcher, DomainEventContainer> &a) : artifact_{a} { } template<class Fun> void add_handler(Fun f) { using std::move; artifact_.add_handler(move(f)); } private: basic_artifact<DomainEventDispatcher, DomainEventContainer> &artifact_; }; typedef basic_artifact_view<artifact> artifact_view; } } ## Instruction: Refactor artifact view to provide event pointer created by the artifact after applying a change ## Code After: namespace cddd { namespace cqrs { template<class> class basic_artifact_view; template<class DomainEventDispatcher, class DomainEventContainer> class basic_artifact_view<basic_artifact<DomainEventDispatcher, DomainEventContainer>> { public: using id_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::id_type; using size_type = typename basic_artifact<DomainEventDispatcher, DomainEventContainer>::size_type; const id_type &id() const { return artifact_.id(); } size_type revision() const { return artifact_.revision(); } template<class Evt> inline auto apply_change(Evt &&e) { using std::forward; return artifact_.apply_change(forward<Evt>(e)); } protected: explicit inline basic_artifact_view(basic_artifact<DomainEventDispatcher, DomainEventContainer> &a) : artifact_{a} { } template<class Fun> void add_handler(Fun f) { using std::move; artifact_.add_handler(move(f)); } private: basic_artifact<DomainEventDispatcher, DomainEventContainer> &artifact_; }; typedef basic_artifact_view<artifact> artifact_view; } }
// ... existing code ... } template<class Evt> inline auto apply_change(Evt &&e) { using std::forward; return artifact_.apply_change(forward<Evt>(e)); } protected: // ... rest of the code ...
01140deea21cda2b7adfcbeea393780ebc402bb5
ncdc_count/src/main/java/nz/org/hikari/cat/ncdc_count/NcdcRecordCountMapper.java
ncdc_count/src/main/java/nz/org/hikari/cat/ncdc_count/NcdcRecordCountMapper.java
package nz.org.hikari.cat.ncdc_count; // Mapper for NCDC record count MapReduce job, called from NcdcRecordCount // driver class import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; public class NcdcRecordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> { @Override public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String id = line.substring(4, 10) + "-" + line.substring(10, 15); context.write(new Text(id), new LongWritable(1L)); } }
package nz.org.hikari.cat.ncdc_count; // Mapper for NCDC record count MapReduce job, called from NcdcRecordCount // driver class import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; public class NcdcRecordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> { static final LongWritable one = new LongWritable(1); @Override public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String id = line.substring(4, 10) + "-" + line.substring(10, 15); context.write(new Text(id), one); } }
Create static LongWritable(1) outside mapper
ncdc_count: Create static LongWritable(1) outside mapper
Java
mit
simonbrady/cat,simonbrady/cat
java
## Code Before: package nz.org.hikari.cat.ncdc_count; // Mapper for NCDC record count MapReduce job, called from NcdcRecordCount // driver class import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; public class NcdcRecordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> { @Override public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String id = line.substring(4, 10) + "-" + line.substring(10, 15); context.write(new Text(id), new LongWritable(1L)); } } ## Instruction: ncdc_count: Create static LongWritable(1) outside mapper ## Code After: package nz.org.hikari.cat.ncdc_count; // Mapper for NCDC record count MapReduce job, called from NcdcRecordCount // driver class import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; public class NcdcRecordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> { static final LongWritable one = new LongWritable(1); @Override public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String id = line.substring(4, 10) + "-" + line.substring(10, 15); context.write(new Text(id), one); } }
// ... existing code ... public class NcdcRecordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> { static final LongWritable one = new LongWritable(1); @Override public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String id = line.substring(4, 10) + "-" + line.substring(10, 15); context.write(new Text(id), one); } } // ... rest of the code ...
7f8898a4ba9bcfa83b43ceac628b226ba601115d
app/src/main/java/com/jonasgerdes/schauburgr/usecase/home/movies/movie_list/MovieListAdapter.java
app/src/main/java/com/jonasgerdes/schauburgr/usecase/home/movies/movie_list/MovieListAdapter.java
package com.jonasgerdes.schauburgr.usecase.home.movies.movie_list; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.jonasgerdes.schauburgr.R; import com.jonasgerdes.schauburgr.model.Movie; import java.util.ArrayList; import java.util.List; /** * Created by jonas on 05.03.2017. */ public class MovieListAdapter extends RecyclerView.Adapter<MovieHolder> { private List<Movie> mMovies = new ArrayList<>(); @Override public MovieHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()) .inflate(R.layout.home_movies_item_movie, parent, false); return new MovieHolder(view); } @Override public void onBindViewHolder(MovieHolder holder, int position) { Movie movie = mMovies.get(position); holder.onBind(movie); } @Override public int getItemCount() { return mMovies.size(); } public void setMovies(List<Movie> movies) { mMovies = movies; notifyDataSetChanged(); } }
package com.jonasgerdes.schauburgr.usecase.home.movies.movie_list; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.jonasgerdes.schauburgr.R; import com.jonasgerdes.schauburgr.model.Movie; import java.util.ArrayList; import java.util.List; import io.realm.RealmChangeListener; import io.realm.RealmResults; /** * Created by jonas on 05.03.2017. */ public class MovieListAdapter extends RecyclerView.Adapter<MovieHolder> { private List<Movie> mMovies = new ArrayList<>(); @Override public MovieHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()) .inflate(R.layout.home_movies_item_movie, parent, false); return new MovieHolder(view); } @Override public void onBindViewHolder(MovieHolder holder, int position) { Movie movie = mMovies.get(position); holder.onBind(movie); } @Override public int getItemCount() { return mMovies.size(); } public void setMovies(RealmResults<Movie> movies) { mMovies = movies; notifyDataSetChanged(); movies.addChangeListener(new RealmChangeListener<RealmResults<Movie>>() { @Override public void onChange(RealmResults<Movie> element) { notifyDataSetChanged(); } }); } }
Add automatic refresh of movies when new data is available
Add automatic refresh of movies when new data is available
Java
mit
JGerdes/Schauburgr
java
## Code Before: package com.jonasgerdes.schauburgr.usecase.home.movies.movie_list; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.jonasgerdes.schauburgr.R; import com.jonasgerdes.schauburgr.model.Movie; import java.util.ArrayList; import java.util.List; /** * Created by jonas on 05.03.2017. */ public class MovieListAdapter extends RecyclerView.Adapter<MovieHolder> { private List<Movie> mMovies = new ArrayList<>(); @Override public MovieHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()) .inflate(R.layout.home_movies_item_movie, parent, false); return new MovieHolder(view); } @Override public void onBindViewHolder(MovieHolder holder, int position) { Movie movie = mMovies.get(position); holder.onBind(movie); } @Override public int getItemCount() { return mMovies.size(); } public void setMovies(List<Movie> movies) { mMovies = movies; notifyDataSetChanged(); } } ## Instruction: Add automatic refresh of movies when new data is available ## Code After: package com.jonasgerdes.schauburgr.usecase.home.movies.movie_list; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.jonasgerdes.schauburgr.R; import com.jonasgerdes.schauburgr.model.Movie; import java.util.ArrayList; import java.util.List; import io.realm.RealmChangeListener; import io.realm.RealmResults; /** * Created by jonas on 05.03.2017. */ public class MovieListAdapter extends RecyclerView.Adapter<MovieHolder> { private List<Movie> mMovies = new ArrayList<>(); @Override public MovieHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()) .inflate(R.layout.home_movies_item_movie, parent, false); return new MovieHolder(view); } @Override public void onBindViewHolder(MovieHolder holder, int position) { Movie movie = mMovies.get(position); holder.onBind(movie); } @Override public int getItemCount() { return mMovies.size(); } public void setMovies(RealmResults<Movie> movies) { mMovies = movies; notifyDataSetChanged(); movies.addChangeListener(new RealmChangeListener<RealmResults<Movie>>() { @Override public void onChange(RealmResults<Movie> element) { notifyDataSetChanged(); } }); } }
... import java.util.ArrayList; import java.util.List; import io.realm.RealmChangeListener; import io.realm.RealmResults; /** * Created by jonas on 05.03.2017. ... return mMovies.size(); } public void setMovies(RealmResults<Movie> movies) { mMovies = movies; notifyDataSetChanged(); movies.addChangeListener(new RealmChangeListener<RealmResults<Movie>>() { @Override public void onChange(RealmResults<Movie> element) { notifyDataSetChanged(); } }); } } ...
24bf05a0329fc0fc75d9e110cd72a2ef832cb4e2
src/main/java/me/rkfg/xmpp/bot/plugins/game/command/ListPlayersCommand.java
src/main/java/me/rkfg/xmpp/bot/plugins/game/command/ListPlayersCommand.java
package me.rkfg.xmpp.bot.plugins.game.command; import static me.rkfg.xmpp.bot.plugins.game.misc.Utils.*; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.IntStream; import java.util.stream.Stream; import me.rkfg.xmpp.bot.plugins.game.IPlayer; import me.rkfg.xmpp.bot.plugins.game.World; public class ListPlayersCommand implements ICommandHandler { @Override public Optional<String> exec(IPlayer player, Stream<String> args) { List<IPlayer> playersList = World.THIS.listPlayers(); if (playersList.isEmpty()) { return Optional.of("Игроков нет."); } return IntStream.range(0, playersList.size()).mapToObj(i -> { final IPlayer p = playersList.get(i); return "" + (i + 1) + ": " + p.getName() + (p.isAlive() ? "" : " [мёртв]") + (p == player ? " [вы]" : ""); }).reduce(commaReducer).map(list -> "Игроки: " + list); } @Override public Optional<String> getHelp() { return Optional.of("Вывести список всех участников."); } @Override public Collection<String> getCommand() { return Arrays.asList("игроки"); } }
package me.rkfg.xmpp.bot.plugins.game.command; import static me.rkfg.xmpp.bot.plugins.game.misc.Utils.*; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.IntStream; import java.util.stream.Stream; import me.rkfg.xmpp.bot.plugins.game.IPlayer; import me.rkfg.xmpp.bot.plugins.game.World; public class ListPlayersCommand implements ICommandHandler { @Override public Optional<String> exec(IPlayer player, Stream<String> args) { List<IPlayer> playersList = World.THIS.listPlayers(); if (playersList.isEmpty()) { return Optional.of("Игроков нет."); } return IntStream.range(0, playersList.size()).mapToObj(i -> { final IPlayer p = playersList.get(i); return "" + (i + 1) + ": " + p.getName() + (p.isAlive() ? "" : " [мёртв]") + (p == player ? " [вы]" : ""); }).reduce(commaReducer).map(list -> "Игроки: " + list); } @Override public Optional<String> getHelp() { return Optional.of("Вывести список всех участников."); } @Override public Collection<String> getCommand() { return Arrays.asList("игроки"); } @Override public boolean deadAllowed() { return true; } }
Allow listing players if dead
Allow listing players if dead
Java
agpl-3.0
rkfg/jbot,rkfg/jbot
java
## Code Before: package me.rkfg.xmpp.bot.plugins.game.command; import static me.rkfg.xmpp.bot.plugins.game.misc.Utils.*; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.IntStream; import java.util.stream.Stream; import me.rkfg.xmpp.bot.plugins.game.IPlayer; import me.rkfg.xmpp.bot.plugins.game.World; public class ListPlayersCommand implements ICommandHandler { @Override public Optional<String> exec(IPlayer player, Stream<String> args) { List<IPlayer> playersList = World.THIS.listPlayers(); if (playersList.isEmpty()) { return Optional.of("Игроков нет."); } return IntStream.range(0, playersList.size()).mapToObj(i -> { final IPlayer p = playersList.get(i); return "" + (i + 1) + ": " + p.getName() + (p.isAlive() ? "" : " [мёртв]") + (p == player ? " [вы]" : ""); }).reduce(commaReducer).map(list -> "Игроки: " + list); } @Override public Optional<String> getHelp() { return Optional.of("Вывести список всех участников."); } @Override public Collection<String> getCommand() { return Arrays.asList("игроки"); } } ## Instruction: Allow listing players if dead ## Code After: package me.rkfg.xmpp.bot.plugins.game.command; import static me.rkfg.xmpp.bot.plugins.game.misc.Utils.*; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.IntStream; import java.util.stream.Stream; import me.rkfg.xmpp.bot.plugins.game.IPlayer; import me.rkfg.xmpp.bot.plugins.game.World; public class ListPlayersCommand implements ICommandHandler { @Override public Optional<String> exec(IPlayer player, Stream<String> args) { List<IPlayer> playersList = World.THIS.listPlayers(); if (playersList.isEmpty()) { return Optional.of("Игроков нет."); } return IntStream.range(0, playersList.size()).mapToObj(i -> { final IPlayer p = playersList.get(i); return "" + (i + 1) + ": " + p.getName() + (p.isAlive() ? "" : " [мёртв]") + (p == player ? " [вы]" : ""); }).reduce(commaReducer).map(list -> "Игроки: " + list); } @Override public Optional<String> getHelp() { return Optional.of("Вывести список всех участников."); } @Override public Collection<String> getCommand() { return Arrays.asList("игроки"); } @Override public boolean deadAllowed() { return true; } }
# ... existing code ... public Collection<String> getCommand() { return Arrays.asList("игроки"); } @Override public boolean deadAllowed() { return true; } } # ... rest of the code ...
b184b150d7ac6b953b999b84ddfc656a83afb17b
app/src/main/java/org/wikipedia/concurrency/CallbackTask.java
app/src/main/java/org/wikipedia/concurrency/CallbackTask.java
package org.wikipedia.concurrency; import android.support.annotation.NonNull; import android.support.annotation.Nullable; public class CallbackTask<T> extends SaneAsyncTask<T> { public interface Callback<T> { void success(T row); } public interface Task<T> { T execute(); } @NonNull private final Task<T> task; @Nullable private final Callback<T> callback; public static <T> void execute(@NonNull Task<T> task) { execute(task, null); } public static <T> void execute(@NonNull Task<T> task, @Nullable Callback<T> callback) { new CallbackTask<>(task, callback).execute(); } CallbackTask(@NonNull Task<T> task, @Nullable Callback<T> callback) { this.task = task; this.callback = callback; } @Override public T performTask() throws Throwable { return task.execute(); } @Override public void onFinish(T result) { super.onFinish(result); if (callback != null) { callback.success(result); } } }
package org.wikipedia.concurrency; import android.support.annotation.NonNull; import android.support.annotation.Nullable; public class CallbackTask<T> extends SaneAsyncTask<T> { public interface Callback<T> { void success(T row); } public interface Task<T> { T execute(); } @NonNull private final Task<T> task; @Nullable private Callback<T> callback; public static <T> void execute(@NonNull Task<T> task) { execute(task, null); } public static <T> void execute(@NonNull Task<T> task, @Nullable Callback<T> callback) { new CallbackTask<>(task, callback).execute(); } CallbackTask(@NonNull Task<T> task, @Nullable Callback<T> callback) { this.task = task; this.callback = callback; } @Override public T performTask() throws Throwable { return task.execute(); } @Override public void onFinish(T result) { super.onFinish(result); if (callback != null) { callback.success(result); callback = null; } } }
Fix possible memory leak in feed.
Fix possible memory leak in feed. I've observed a memory leak (on API 17) apparently stemming from the new functionality to fill in the bookmark icon when the featured article is detected to be part of a reading list. This seems to be caused by us passing an anonymous instance of a CallbackTask.Callback object from FeaturedArticleView, which contains references to $this, which contains a reference to the activity context. This patch solves the leak by modifying the CallbackTask to no longer hold on to a reference of Callback, and instead null it out immediately after it's called. Change-Id: I768e24dab7f39652ec741a5d6f9821db7de4f2bd
Java
apache-2.0
dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,wikimedia/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia
java
## Code Before: package org.wikipedia.concurrency; import android.support.annotation.NonNull; import android.support.annotation.Nullable; public class CallbackTask<T> extends SaneAsyncTask<T> { public interface Callback<T> { void success(T row); } public interface Task<T> { T execute(); } @NonNull private final Task<T> task; @Nullable private final Callback<T> callback; public static <T> void execute(@NonNull Task<T> task) { execute(task, null); } public static <T> void execute(@NonNull Task<T> task, @Nullable Callback<T> callback) { new CallbackTask<>(task, callback).execute(); } CallbackTask(@NonNull Task<T> task, @Nullable Callback<T> callback) { this.task = task; this.callback = callback; } @Override public T performTask() throws Throwable { return task.execute(); } @Override public void onFinish(T result) { super.onFinish(result); if (callback != null) { callback.success(result); } } } ## Instruction: Fix possible memory leak in feed. I've observed a memory leak (on API 17) apparently stemming from the new functionality to fill in the bookmark icon when the featured article is detected to be part of a reading list. This seems to be caused by us passing an anonymous instance of a CallbackTask.Callback object from FeaturedArticleView, which contains references to $this, which contains a reference to the activity context. This patch solves the leak by modifying the CallbackTask to no longer hold on to a reference of Callback, and instead null it out immediately after it's called. Change-Id: I768e24dab7f39652ec741a5d6f9821db7de4f2bd ## Code After: package org.wikipedia.concurrency; import android.support.annotation.NonNull; import android.support.annotation.Nullable; public class CallbackTask<T> extends SaneAsyncTask<T> { public interface Callback<T> { void success(T row); } public interface Task<T> { T execute(); } @NonNull private final Task<T> task; @Nullable private Callback<T> callback; public static <T> void execute(@NonNull Task<T> task) { execute(task, null); } public static <T> void execute(@NonNull Task<T> task, @Nullable Callback<T> callback) { new CallbackTask<>(task, callback).execute(); } CallbackTask(@NonNull Task<T> task, @Nullable Callback<T> callback) { this.task = task; this.callback = callback; } @Override public T performTask() throws Throwable { return task.execute(); } @Override public void onFinish(T result) { super.onFinish(result); if (callback != null) { callback.success(result); callback = null; } } }
... } @NonNull private final Task<T> task; @Nullable private Callback<T> callback; public static <T> void execute(@NonNull Task<T> task) { execute(task, null); ... super.onFinish(result); if (callback != null) { callback.success(result); callback = null; } } } ...
f00794b0907cebf28c2001dde632c5959a79c977
src/test/java/com/taxtelecom/chelnyedu/dropwizard/TestApplication.java
src/test/java/com/taxtelecom/chelnyedu/dropwizard/TestApplication.java
package com.taxtelecom.chelnyedu.dropwizard; import static org.fest.assertions.api.Assertions.assertThat; import javax.ws.rs.core.Response; import org.junit.Test; import com.taxtelecom.chelnyedu.dropwizard.representations.Contact; import com.taxtelecom.chelnyedu.dropwizard.resources.ContactResources; /** * Created by sagel on 06.07.17. */ public class TestApplication { private Contact contactForTest = new Contact(0, "John", "Doe", "+123456789"); @Test public void checkContact(){ ContactResources resources = new ContactResources(); Response contact = resources.getContact(0); Contact con = (Contact) contact.getEntity(); assertThat(contactForTest.getFirstName()).isEqualTo(con.getFirstName()); assertThat(contactForTest.getLastName()).isEqualTo(con.getLastName()); assertThat(contactForTest.getPhone()).isEqualTo(con.getPhone()); } }
package com.taxtelecom.chelnyedu.dropwizard; import static org.fest.assertions.api.Assertions.assertThat; import javax.ws.rs.core.Response; import org.junit.Test; import com.taxtelecom.chelnyedu.dropwizard.representations.Contact; import com.taxtelecom.chelnyedu.dropwizard.resources.ContactResources; /** * Created by sagel on 06.07.17. */ public class TestApplication { private Contact contactForTest = new Contact(0, "John", "Doe", "+123456789"); private Contact emptyContact = new Contact(); private Contact contact = new Contact(0, null, null, null); @Test public void checkContact(){ ContactResources resources = new ContactResources(); Response contact = resources.getContact(0); Contact con = (Contact) contact.getEntity(); assertThat(contactForTest.getFirstName()).isEqualTo(con.getFirstName()); assertThat(contactForTest.getLastName()).isEqualTo(con.getLastName()); assertThat(contactForTest.getPhone()).isEqualTo(con.getPhone()); assertThat(contactForTest.getId()).isEqualTo(con.getId()); } @Test public void checkEmpty(){ assertThat(emptyContact.getFirstName()).isEqualTo(contact.getFirstName()); assertThat(emptyContact.getLastName()).isEqualTo(contact.getLastName()); assertThat(emptyContact.getPhone()).isEqualTo(contact.getPhone()); assertThat(emptyContact.getId()).isEqualTo(contact.getId()); } }
Test for constructor and getId() by Sagel
Test for constructor and getId() by Sagel
Java
apache-2.0
chelnyedu/dropwizard-rest,chelnyedu/dropwizard-rest
java
## Code Before: package com.taxtelecom.chelnyedu.dropwizard; import static org.fest.assertions.api.Assertions.assertThat; import javax.ws.rs.core.Response; import org.junit.Test; import com.taxtelecom.chelnyedu.dropwizard.representations.Contact; import com.taxtelecom.chelnyedu.dropwizard.resources.ContactResources; /** * Created by sagel on 06.07.17. */ public class TestApplication { private Contact contactForTest = new Contact(0, "John", "Doe", "+123456789"); @Test public void checkContact(){ ContactResources resources = new ContactResources(); Response contact = resources.getContact(0); Contact con = (Contact) contact.getEntity(); assertThat(contactForTest.getFirstName()).isEqualTo(con.getFirstName()); assertThat(contactForTest.getLastName()).isEqualTo(con.getLastName()); assertThat(contactForTest.getPhone()).isEqualTo(con.getPhone()); } } ## Instruction: Test for constructor and getId() by Sagel ## Code After: package com.taxtelecom.chelnyedu.dropwizard; import static org.fest.assertions.api.Assertions.assertThat; import javax.ws.rs.core.Response; import org.junit.Test; import com.taxtelecom.chelnyedu.dropwizard.representations.Contact; import com.taxtelecom.chelnyedu.dropwizard.resources.ContactResources; /** * Created by sagel on 06.07.17. */ public class TestApplication { private Contact contactForTest = new Contact(0, "John", "Doe", "+123456789"); private Contact emptyContact = new Contact(); private Contact contact = new Contact(0, null, null, null); @Test public void checkContact(){ ContactResources resources = new ContactResources(); Response contact = resources.getContact(0); Contact con = (Contact) contact.getEntity(); assertThat(contactForTest.getFirstName()).isEqualTo(con.getFirstName()); assertThat(contactForTest.getLastName()).isEqualTo(con.getLastName()); assertThat(contactForTest.getPhone()).isEqualTo(con.getPhone()); assertThat(contactForTest.getId()).isEqualTo(con.getId()); } @Test public void checkEmpty(){ assertThat(emptyContact.getFirstName()).isEqualTo(contact.getFirstName()); assertThat(emptyContact.getLastName()).isEqualTo(contact.getLastName()); assertThat(emptyContact.getPhone()).isEqualTo(contact.getPhone()); assertThat(emptyContact.getId()).isEqualTo(contact.getId()); } }
... */ public class TestApplication { private Contact contactForTest = new Contact(0, "John", "Doe", "+123456789"); private Contact emptyContact = new Contact(); private Contact contact = new Contact(0, null, null, null); @Test public void checkContact(){ ... assertThat(contactForTest.getFirstName()).isEqualTo(con.getFirstName()); assertThat(contactForTest.getLastName()).isEqualTo(con.getLastName()); assertThat(contactForTest.getPhone()).isEqualTo(con.getPhone()); assertThat(contactForTest.getId()).isEqualTo(con.getId()); } @Test public void checkEmpty(){ assertThat(emptyContact.getFirstName()).isEqualTo(contact.getFirstName()); assertThat(emptyContact.getLastName()).isEqualTo(contact.getLastName()); assertThat(emptyContact.getPhone()).isEqualTo(contact.getPhone()); assertThat(emptyContact.getId()).isEqualTo(contact.getId()); } } ...
09311a99d37c8623a644acb30daf8523a5e7a196
django_auth_policy/validators.py
django_auth_policy/validators.py
from django.core.exceptions import ValidationError from django_auth_policy import settings as dap_settings def password_min_length(value): if dap_settings.PASSWORD_MIN_LENGTH_TEXT is None: return if len(value) < dap_settings.PASSWORD_MIN_LENGTH: msg = dap_settings.PASSWORD_MIN_LENGTH_TEXT.format( length=dap_settings.PASSWORD_MIN_LENGTH) raise ValidationError(msg, code='password_min_length') def password_complexity(value): if not dap_settings.PASSWORD_COMPLEXITY: return pw_set = set(value) for rule in dap_settings.PASSWORD_COMPLEXITY: if not pw_set.intersection(rule['chars']): msg = dap_settings.PASSWORD_COMPLEXITY_TEXT.format( rule_text=rule['text']) raise ValidationError(msg, 'password_complexity')
from django.core.exceptions import ValidationError from django.utils.translation import ugettext as _ from django_auth_policy import settings as dap_settings def password_min_length(value): if dap_settings.PASSWORD_MIN_LENGTH_TEXT is None: return if len(value) < dap_settings.PASSWORD_MIN_LENGTH: msg = _(dap_settings.PASSWORD_MIN_LENGTH_TEXT).format( length=dap_settings.PASSWORD_MIN_LENGTH) raise ValidationError(msg, code='password_min_length') def password_complexity(value): if not dap_settings.PASSWORD_COMPLEXITY: return pw_set = set(value) for rule in dap_settings.PASSWORD_COMPLEXITY: if not pw_set.intersection(rule['chars']): msg = _(dap_settings.PASSWORD_COMPLEXITY_TEXT).format( rule_text=_(rule['text'])) raise ValidationError(msg, 'password_complexity')
Fix translatability of validation messages when defined in custom settings
Fix translatability of validation messages when defined in custom settings
Python
bsd-3-clause
mcella/django-auth-policy,mcella/django-auth-policy,Dreamsolution/django-auth-policy,Dreamsolution/django-auth-policy
python
## Code Before: from django.core.exceptions import ValidationError from django_auth_policy import settings as dap_settings def password_min_length(value): if dap_settings.PASSWORD_MIN_LENGTH_TEXT is None: return if len(value) < dap_settings.PASSWORD_MIN_LENGTH: msg = dap_settings.PASSWORD_MIN_LENGTH_TEXT.format( length=dap_settings.PASSWORD_MIN_LENGTH) raise ValidationError(msg, code='password_min_length') def password_complexity(value): if not dap_settings.PASSWORD_COMPLEXITY: return pw_set = set(value) for rule in dap_settings.PASSWORD_COMPLEXITY: if not pw_set.intersection(rule['chars']): msg = dap_settings.PASSWORD_COMPLEXITY_TEXT.format( rule_text=rule['text']) raise ValidationError(msg, 'password_complexity') ## Instruction: Fix translatability of validation messages when defined in custom settings ## Code After: from django.core.exceptions import ValidationError from django.utils.translation import ugettext as _ from django_auth_policy import settings as dap_settings def password_min_length(value): if dap_settings.PASSWORD_MIN_LENGTH_TEXT is None: return if len(value) < dap_settings.PASSWORD_MIN_LENGTH: msg = _(dap_settings.PASSWORD_MIN_LENGTH_TEXT).format( length=dap_settings.PASSWORD_MIN_LENGTH) raise ValidationError(msg, code='password_min_length') def password_complexity(value): if not dap_settings.PASSWORD_COMPLEXITY: return pw_set = set(value) for rule in dap_settings.PASSWORD_COMPLEXITY: if not pw_set.intersection(rule['chars']): msg = _(dap_settings.PASSWORD_COMPLEXITY_TEXT).format( rule_text=_(rule['text'])) raise ValidationError(msg, 'password_complexity')
... from django.core.exceptions import ValidationError from django.utils.translation import ugettext as _ from django_auth_policy import settings as dap_settings ... return if len(value) < dap_settings.PASSWORD_MIN_LENGTH: msg = _(dap_settings.PASSWORD_MIN_LENGTH_TEXT).format( length=dap_settings.PASSWORD_MIN_LENGTH) raise ValidationError(msg, code='password_min_length') ... pw_set = set(value) for rule in dap_settings.PASSWORD_COMPLEXITY: if not pw_set.intersection(rule['chars']): msg = _(dap_settings.PASSWORD_COMPLEXITY_TEXT).format( rule_text=_(rule['text'])) raise ValidationError(msg, 'password_complexity') ...
d597da7682648b02a02e1f33cd65defdc932253a
src/main/kotlin/com/ids1024/whitakerswords/SearchAdapter.kt
src/main/kotlin/com/ids1024/whitakerswords/SearchAdapter.kt
package com.ids1024.whitakerswords import java.util.ArrayList import android.widget.TextView import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import android.support.v7.widget.RecyclerView import android.text.SpannableStringBuilder class SearchAdapter(results: ArrayList<SpannableStringBuilder>) : RecyclerView.Adapter<SearchAdapter.ViewHolder>() { var results = results override fun getItemCount(): Int { return results.size } override fun onBindViewHolder(holder: ViewHolder, position: Int) { holder.text_view.text = results.get(position) } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { val view = LayoutInflater.from(parent.getContext()).inflate(R.layout.result, null) return ViewHolder(view) } class ViewHolder(view: View) : RecyclerView.ViewHolder(view) { val text_view: TextView = view.findViewById(R.id.result_text) } }
package com.ids1024.whitakerswords import java.util.ArrayList import android.widget.TextView import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import android.support.v7.widget.RecyclerView import android.text.SpannableStringBuilder class SearchAdapter(results: ArrayList<SpannableStringBuilder>) : RecyclerView.Adapter<SearchAdapter.ViewHolder>() { var results = results override fun getItemCount(): Int { return results.size } override fun onBindViewHolder(holder: ViewHolder, position: Int) { holder.text_view.text = results.get(position) } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { val view = LayoutInflater.from(parent.context) .inflate(R.layout.result, parent, false) return ViewHolder(view) } class ViewHolder(view: View) : RecyclerView.ViewHolder(view) { val text_view: TextView = view.findViewById(R.id.result_text) } }
Make LayoutInflator.inflate() call match docs
Make LayoutInflator.inflate() call match docs Don't know if this changes anything...
Kotlin
mit
ids1024/whitakers-words-android,ids1024/whitakers-words-android
kotlin
## Code Before: package com.ids1024.whitakerswords import java.util.ArrayList import android.widget.TextView import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import android.support.v7.widget.RecyclerView import android.text.SpannableStringBuilder class SearchAdapter(results: ArrayList<SpannableStringBuilder>) : RecyclerView.Adapter<SearchAdapter.ViewHolder>() { var results = results override fun getItemCount(): Int { return results.size } override fun onBindViewHolder(holder: ViewHolder, position: Int) { holder.text_view.text = results.get(position) } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { val view = LayoutInflater.from(parent.getContext()).inflate(R.layout.result, null) return ViewHolder(view) } class ViewHolder(view: View) : RecyclerView.ViewHolder(view) { val text_view: TextView = view.findViewById(R.id.result_text) } } ## Instruction: Make LayoutInflator.inflate() call match docs Don't know if this changes anything... ## Code After: package com.ids1024.whitakerswords import java.util.ArrayList import android.widget.TextView import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import android.support.v7.widget.RecyclerView import android.text.SpannableStringBuilder class SearchAdapter(results: ArrayList<SpannableStringBuilder>) : RecyclerView.Adapter<SearchAdapter.ViewHolder>() { var results = results override fun getItemCount(): Int { return results.size } override fun onBindViewHolder(holder: ViewHolder, position: Int) { holder.text_view.text = results.get(position) } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { val view = LayoutInflater.from(parent.context) .inflate(R.layout.result, parent, false) return ViewHolder(view) } class ViewHolder(view: View) : RecyclerView.ViewHolder(view) { val text_view: TextView = view.findViewById(R.id.result_text) } }
... } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { val view = LayoutInflater.from(parent.context) .inflate(R.layout.result, parent, false) return ViewHolder(view) } ...
2ad16c44adb20e9ba023e873149d67068504c34c
saleor/cart/__init__.py
saleor/cart/__init__.py
from __future__ import unicode_literals from django.utils.translation import pgettext from satchless import cart from satchless.item import ItemList, ClassifyingPartitioner from ..product.models import DigitalShip class ShippedGroup(ItemList): ''' Group for shippable products. ''' pass class DigitalGroup(ItemList): ''' Group for digital products. ''' pass class CartPartitioner(ClassifyingPartitioner): ''' Dividing cart into groups. ''' def classify(self, item): if isinstance(item.product, DigitalShip): return 'digital' return 'shippable' def get_partition(self, classifier, items): if classifier == 'digital': return DigitalGroup(items) return ShippedGroup(items) class Cart(cart.Cart): ''' Contains cart items. Serialized instance of cart is saved into django session. ''' timestamp = None billing_address = None def __unicode__(self): return pgettext( 'Shopping cart', 'Your cart (%(cart_count)s)') % {'cart_count': self.count()}
from __future__ import unicode_literals from django.utils.translation import pgettext from satchless import cart from satchless.item import ItemList, ClassifyingPartitioner from ..product.models import DigitalShip class ShippedGroup(ItemList): ''' Group for shippable products. ''' pass class DigitalGroup(ItemList): ''' Group for digital products. ''' pass class CartPartitioner(ClassifyingPartitioner): ''' Dividing cart into groups. ''' def classify(self, item): if isinstance(item.product, DigitalShip): return 'digital' return 'shippable' def get_partition(self, classifier, items): if classifier == 'digital': return DigitalGroup(items) return ShippedGroup(items) class Cart(cart.Cart): ''' Contains cart items. Serialized instance of cart is saved into django session. ''' timestamp = None billing_address = None def __unicode__(self): return pgettext( 'Shopping cart', 'Your cart (%(cart_count)s)') % {'cart_count': self.count()} def clear(self): self._state = []
Add ability to clear cart
Add ability to clear cart
Python
bsd-3-clause
avorio/saleor,laosunhust/saleor,hongquan/saleor,maferelo/saleor,avorio/saleor,car3oon/saleor,tfroehlich82/saleor,UITools/saleor,car3oon/saleor,Drekscott/Motlaesaleor,maferelo/saleor,hongquan/saleor,josesanch/saleor,taedori81/saleor,KenMutemi/saleor,tfroehlich82/saleor,spartonia/saleor,mociepka/saleor,itbabu/saleor,UITools/saleor,laosunhust/saleor,rchav/vinerack,KenMutemi/saleor,Drekscott/Motlaesaleor,arth-co/saleor,taedori81/saleor,laosunhust/saleor,laosunhust/saleor,taedori81/saleor,tfroehlich82/saleor,hongquan/saleor,taedori81/saleor,arth-co/saleor,mociepka/saleor,HyperManTT/ECommerceSaleor,arth-co/saleor,rchav/vinerack,spartonia/saleor,UITools/saleor,KenMutemi/saleor,mociepka/saleor,josesanch/saleor,jreigel/saleor,rodrigozn/CW-Shop,dashmug/saleor,UITools/saleor,Drekscott/Motlaesaleor,jreigel/saleor,avorio/saleor,rodrigozn/CW-Shop,rodrigozn/CW-Shop,arth-co/saleor,paweltin/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,josesanch/saleor,paweltin/saleor,jreigel/saleor,paweltin/saleor,HyperManTT/ECommerceSaleor,spartonia/saleor,spartonia/saleor,itbabu/saleor,paweltin/saleor,dashmug/saleor,dashmug/saleor,avorio/saleor,rchav/vinerack,Drekscott/Motlaesaleor,maferelo/saleor,UITools/saleor,itbabu/saleor
python
## Code Before: from __future__ import unicode_literals from django.utils.translation import pgettext from satchless import cart from satchless.item import ItemList, ClassifyingPartitioner from ..product.models import DigitalShip class ShippedGroup(ItemList): ''' Group for shippable products. ''' pass class DigitalGroup(ItemList): ''' Group for digital products. ''' pass class CartPartitioner(ClassifyingPartitioner): ''' Dividing cart into groups. ''' def classify(self, item): if isinstance(item.product, DigitalShip): return 'digital' return 'shippable' def get_partition(self, classifier, items): if classifier == 'digital': return DigitalGroup(items) return ShippedGroup(items) class Cart(cart.Cart): ''' Contains cart items. Serialized instance of cart is saved into django session. ''' timestamp = None billing_address = None def __unicode__(self): return pgettext( 'Shopping cart', 'Your cart (%(cart_count)s)') % {'cart_count': self.count()} ## Instruction: Add ability to clear cart ## Code After: from __future__ import unicode_literals from django.utils.translation import pgettext from satchless import cart from satchless.item import ItemList, ClassifyingPartitioner from ..product.models import DigitalShip class ShippedGroup(ItemList): ''' Group for shippable products. ''' pass class DigitalGroup(ItemList): ''' Group for digital products. ''' pass class CartPartitioner(ClassifyingPartitioner): ''' Dividing cart into groups. ''' def classify(self, item): if isinstance(item.product, DigitalShip): return 'digital' return 'shippable' def get_partition(self, classifier, items): if classifier == 'digital': return DigitalGroup(items) return ShippedGroup(items) class Cart(cart.Cart): ''' Contains cart items. Serialized instance of cart is saved into django session. ''' timestamp = None billing_address = None def __unicode__(self): return pgettext( 'Shopping cart', 'Your cart (%(cart_count)s)') % {'cart_count': self.count()} def clear(self): self._state = []
... return pgettext( 'Shopping cart', 'Your cart (%(cart_count)s)') % {'cart_count': self.count()} def clear(self): self._state = [] ...
e229779753f3c5f44319d882d19feab324abe119
api/migrations/0011_user_preferences_update_troposphere_user.py
api/migrations/0011_user_preferences_update_troposphere_user.py
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('api', '0010_sitemetadata_site_footer_link'), ] operations = [ # migrations.RunSQL( # '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', # reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED''' # ), # migrations.RunSQL( # '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''', # reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''' # ), ]
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('api', '0010_sitemetadata_site_footer_link'), ] # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is. operations = [ # migrations.RunSQL( # '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', # reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED''' # ), # migrations.RunSQL( # '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''', # reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''' # ), ]
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
Python
apache-2.0
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
python
## Code Before: from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('api', '0010_sitemetadata_site_footer_link'), ] operations = [ # migrations.RunSQL( # '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', # reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED''' # ), # migrations.RunSQL( # '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''', # reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''' # ), ] ## Instruction: Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream) ## Code After: from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('api', '0010_sitemetadata_site_footer_link'), ] # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is. operations = [ # migrations.RunSQL( # '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', # reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED''' # ), # migrations.RunSQL( # '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id # FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''', # reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''' # ), ]
// ... existing code ... ('api', '0010_sitemetadata_site_footer_link'), ] # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is. operations = [ # migrations.RunSQL( # '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', // ... rest of the code ...
db362bd50c7b8aa6a40a809346eff70df846f82d
setup.py
setup.py
from setuptools import setup setup( name='pysuru', version='0.0.1', description='Python library to interact with Tsuru API', long_description=open('README.rst', 'r').read(), keywords='tsuru', author='Rodrigo Machado', author_email='[email protected]', url='https://github.com/rcmachado/pysuru', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers' 'Topic :: Software Development :: Libraries', ], install_requires=[ 'urllib3>=1.15' ] packages=['pysuru'], platforms=['linux', 'osx'] )
from setuptools import setup setup( name='pysuru', version='0.0.1', description='Python library to interact with Tsuru API', long_description=open('README.rst', 'r').read(), keywords='tsuru', author='Rodrigo Machado', author_email='[email protected]', url='https://github.com/rcmachado/pysuru', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers' 'Topic :: Software Development :: Libraries', ], install_requires=[ 'urllib3>=1.15', 'certifi' ] packages=['pysuru'], platforms=['linux', 'osx'] )
Add certifi to required packages
Add certifi to required packages
Python
mit
rcmachado/pysuru
python
## Code Before: from setuptools import setup setup( name='pysuru', version='0.0.1', description='Python library to interact with Tsuru API', long_description=open('README.rst', 'r').read(), keywords='tsuru', author='Rodrigo Machado', author_email='[email protected]', url='https://github.com/rcmachado/pysuru', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers' 'Topic :: Software Development :: Libraries', ], install_requires=[ 'urllib3>=1.15' ] packages=['pysuru'], platforms=['linux', 'osx'] ) ## Instruction: Add certifi to required packages ## Code After: from setuptools import setup setup( name='pysuru', version='0.0.1', description='Python library to interact with Tsuru API', long_description=open('README.rst', 'r').read(), keywords='tsuru', author='Rodrigo Machado', author_email='[email protected]', url='https://github.com/rcmachado/pysuru', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers' 'Topic :: Software Development :: Libraries', ], install_requires=[ 'urllib3>=1.15', 'certifi' ] packages=['pysuru'], platforms=['linux', 'osx'] )
# ... existing code ... 'Topic :: Software Development :: Libraries', ], install_requires=[ 'urllib3>=1.15', 'certifi' ] packages=['pysuru'], platforms=['linux', 'osx'] # ... rest of the code ...
5749d976dee7d8a51e25842b528448a077a8f800
report_compassion/models/ir_actions_report.py
report_compassion/models/ir_actions_report.py
from odoo import models, api class IrActionsReport(models.Model): _inherit = "ir.actions.report" @api.multi def behaviour(self): """ Change behaviour to return user preference in priority. :return: report action for printing. """ result = super().behaviour() # Retrieve user default values user = self.env.user if user.printing_action: default_action = user.printing_action for key, val in result.iteritems(): result[key]["action"] = default_action if user.printing_printer_id: default_printer = user.printing_printer_id for key, val in result.iteritems(): result[key]["printer"] = default_printer return result
from odoo import models, api class IrActionsReport(models.Model): _inherit = "ir.actions.report" @api.multi def behaviour(self): """ Change behaviour to return user preference in priority. :return: report action for printing. """ result = super().behaviour() # Retrieve user default values result.update(self._get_user_default_print_behaviour()) return result
FIX default behaviour of printing
FIX default behaviour of printing
Python
agpl-3.0
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland
python
## Code Before: from odoo import models, api class IrActionsReport(models.Model): _inherit = "ir.actions.report" @api.multi def behaviour(self): """ Change behaviour to return user preference in priority. :return: report action for printing. """ result = super().behaviour() # Retrieve user default values user = self.env.user if user.printing_action: default_action = user.printing_action for key, val in result.iteritems(): result[key]["action"] = default_action if user.printing_printer_id: default_printer = user.printing_printer_id for key, val in result.iteritems(): result[key]["printer"] = default_printer return result ## Instruction: FIX default behaviour of printing ## Code After: from odoo import models, api class IrActionsReport(models.Model): _inherit = "ir.actions.report" @api.multi def behaviour(self): """ Change behaviour to return user preference in priority. :return: report action for printing. """ result = super().behaviour() # Retrieve user default values result.update(self._get_user_default_print_behaviour()) return result
... result = super().behaviour() # Retrieve user default values result.update(self._get_user_default_print_behaviour()) return result ...
81908e5f6304cc1c8e8627b0d4c859df194cc36d
ynr/apps/resultsbot/management/commands/store_modgov_urls.py
ynr/apps/resultsbot/management/commands/store_modgov_urls.py
import csv import os from django.core.management.base import BaseCommand import resultsbot from elections.models import Election class Command(BaseCommand): def handle(self, **options): """ Stores possible modgov urls stored in CSV file against the related election objects """ path = os.path.join( os.path.dirname(resultsbot.__file__), "election_id_to_url.csv" ) with open(path) as f: csv_file = csv.reader(f) for line in csv_file: try: election = Election.objects.get(slug=line[0]) election.modgov_url = line[1] election.save() except (IndexError, Election.DoesNotExist): continue
import csv import os from django.core.management.base import BaseCommand import resultsbot from elections.models import Election class Command(BaseCommand): def handle(self, **options): """ Stores possible modgov urls stored in CSV file against the related election objects """ # remove existing values first as this allows us to remove bad urls from the csv file Election.objects.update(modgov_url=None) path = os.path.join( os.path.dirname(resultsbot.__file__), "election_id_to_url.csv" ) with open(path) as f: csv_file = csv.reader(f) for line in csv_file: try: election = Election.objects.get(slug=line[0]) election.modgov_url = line[1] election.save() except (IndexError, Election.DoesNotExist): continue
Delete existing urls before each run
Delete existing urls before each run
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
python
## Code Before: import csv import os from django.core.management.base import BaseCommand import resultsbot from elections.models import Election class Command(BaseCommand): def handle(self, **options): """ Stores possible modgov urls stored in CSV file against the related election objects """ path = os.path.join( os.path.dirname(resultsbot.__file__), "election_id_to_url.csv" ) with open(path) as f: csv_file = csv.reader(f) for line in csv_file: try: election = Election.objects.get(slug=line[0]) election.modgov_url = line[1] election.save() except (IndexError, Election.DoesNotExist): continue ## Instruction: Delete existing urls before each run ## Code After: import csv import os from django.core.management.base import BaseCommand import resultsbot from elections.models import Election class Command(BaseCommand): def handle(self, **options): """ Stores possible modgov urls stored in CSV file against the related election objects """ # remove existing values first as this allows us to remove bad urls from the csv file Election.objects.update(modgov_url=None) path = os.path.join( os.path.dirname(resultsbot.__file__), "election_id_to_url.csv" ) with open(path) as f: csv_file = csv.reader(f) for line in csv_file: try: election = Election.objects.get(slug=line[0]) election.modgov_url = line[1] election.save() except (IndexError, Election.DoesNotExist): continue
// ... existing code ... """ Stores possible modgov urls stored in CSV file against the related election objects """ # remove existing values first as this allows us to remove bad urls from the csv file Election.objects.update(modgov_url=None) path = os.path.join( os.path.dirname(resultsbot.__file__), "election_id_to_url.csv" ) // ... rest of the code ...
bd23a87d28a1d0a1f82b0fd17abfababafba0dc7
viaduct/api/page.py
viaduct/api/page.py
from flask.ext.login import current_user from viaduct.models.page import Page, PagePermission, PageRevision from viaduct import db from flask import request, url_for, render_template from viaduct.models.group import Group class PageAPI: @staticmethod def remove_page(path): page = Page.query.filter(Page.path==path).first() if not page: return False for rev in page.revisions.all(): db.session.delete(rev) for perm in page.permissions.all(): db.session.delete(perm) db.session.commit() db.session.delete(page) db.session.commit() return True @staticmethod def get_footer(): footer = Page.query.filter(Page.path == 'footer').first() if not footer: footer = Page('footer') if footer.revisions.count() > 0: revision = footer.revisions.order_by(PageRevision.id.desc()).first() exists = True else: revision = PageRevision(footer, current_user, '', '<b> No footer found </b>' '', True) exists = False print vars(footer) return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists)
from flask.ext.login import current_user from viaduct.models.page import Page, PageRevision from viaduct import db from flask import render_template class PageAPI: @staticmethod def remove_page(path): page = Page.query.filter(Page.path == path).first() if not page: return False for rev in page.revisions.all(): db.session.delete(rev) for perm in page.permissions.all(): db.session.delete(perm) db.session.commit() db.session.delete(page) db.session.commit() return True @staticmethod def get_footer(): footer = Page.query.filter(Page.path == 'footer').first() if not footer: footer = Page('footer') if footer.revisions.count() > 0: revision = footer.revisions.order_by(PageRevision.id.desc()).\ first() exists = True else: revision = PageRevision(footer, current_user, '', '<b> No footer found </b>' '', True) exists = False return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists)
Remove footer print and make file PEP8 compliant
Remove footer print and make file PEP8 compliant
Python
mit
viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct
python
## Code Before: from flask.ext.login import current_user from viaduct.models.page import Page, PagePermission, PageRevision from viaduct import db from flask import request, url_for, render_template from viaduct.models.group import Group class PageAPI: @staticmethod def remove_page(path): page = Page.query.filter(Page.path==path).first() if not page: return False for rev in page.revisions.all(): db.session.delete(rev) for perm in page.permissions.all(): db.session.delete(perm) db.session.commit() db.session.delete(page) db.session.commit() return True @staticmethod def get_footer(): footer = Page.query.filter(Page.path == 'footer').first() if not footer: footer = Page('footer') if footer.revisions.count() > 0: revision = footer.revisions.order_by(PageRevision.id.desc()).first() exists = True else: revision = PageRevision(footer, current_user, '', '<b> No footer found </b>' '', True) exists = False print vars(footer) return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists) ## Instruction: Remove footer print and make file PEP8 compliant ## Code After: from flask.ext.login import current_user from viaduct.models.page import Page, PageRevision from viaduct import db from flask import render_template class PageAPI: @staticmethod def remove_page(path): page = Page.query.filter(Page.path == path).first() if not page: return False for rev in page.revisions.all(): db.session.delete(rev) for perm in page.permissions.all(): db.session.delete(perm) db.session.commit() db.session.delete(page) db.session.commit() return True @staticmethod def get_footer(): footer = Page.query.filter(Page.path == 'footer').first() if not footer: footer = Page('footer') if footer.revisions.count() > 0: revision = footer.revisions.order_by(PageRevision.id.desc()).\ first() exists = True else: revision = PageRevision(footer, current_user, '', '<b> No footer found </b>' '', True) exists = False return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists)
# ... existing code ... from flask.ext.login import current_user from viaduct.models.page import Page, PageRevision from viaduct import db from flask import render_template class PageAPI: @staticmethod def remove_page(path): page = Page.query.filter(Page.path == path).first() if not page: return False # ... modified code ... footer = Page('footer') if footer.revisions.count() > 0: revision = footer.revisions.order_by(PageRevision.id.desc()).\ first() exists = True else: revision = PageRevision(footer, current_user, '', '<b> No footer found </b>' '', True) exists = False return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists) # ... rest of the code ...
74201b3f0de435b167786fe31159f9217e860a79
Lagrangian/L3TestResult.h
Lagrangian/L3TestResult.h
// L3TestResult.h // Created by Rob Rix on 2012-11-12. // Copyright (c) 2012 Rob Rix. All rights reserved. #import <Foundation/Foundation.h> @interface L3TestResult : NSObject +(instancetype)testResultWithName:(NSString *)name startDate:(NSDate *)startDate; @property (strong, nonatomic) L3TestResult *parent; // this really only exists to make stacks easier; nil it out on pop or you get retain cycles @property (copy, nonatomic, readonly) NSString *name; @property (strong, nonatomic, readonly) NSDate *startDate; @property (assign, nonatomic) NSTimeInterval duration; @property (assign, nonatomic) NSUInteger testCaseCount; @property (assign, nonatomic) NSUInteger assertionCount; @property (assign, nonatomic) NSUInteger assertionFailureCount; @property (assign, nonatomic) NSUInteger exceptionCount; @property (nonatomic, readonly) bool succeeded; @property (nonatomic, readonly) bool failed; @property (nonatomic, readonly) NSArray *testResults; -(void)addTestResult:(L3TestResult *)testResult; @end
// L3TestResult.h // Created by Rob Rix on 2012-11-12. // Copyright (c) 2012 Rob Rix. All rights reserved. #import <Foundation/Foundation.h> @interface L3TestResult : NSObject +(instancetype)testResultWithName:(NSString *)name startDate:(NSDate *)startDate; @property (strong, nonatomic) L3TestResult *parent; // this really only exists to make stacks easier; nil it out on pop or you get retain cycles @property (copy, nonatomic, readonly) NSString *name; @property (strong, nonatomic, readonly) NSDate *startDate; @property (strong, nonatomic) NSDate *endDate; @property (assign, nonatomic) NSTimeInterval duration; @property (assign, nonatomic) NSUInteger testCaseCount; @property (assign, nonatomic) NSUInteger assertionCount; @property (assign, nonatomic) NSUInteger assertionFailureCount; @property (assign, nonatomic) NSUInteger exceptionCount; @property (nonatomic, readonly) bool succeeded; @property (nonatomic, readonly) bool failed; @property (nonatomic, readonly) NSArray *testResults; -(void)addTestResult:(L3TestResult *)testResult; @end
Test results have an end date.
Test results have an end date.
C
bsd-3-clause
ashfurrow/RXCollections,robrix/RXCollections,policp/RXCollections,ashfurrow/RXCollections,robrix/RXCollections,policp/RXCollections,robrix/Lagrangian,robrix/Lagrangian,robrix/RXCollections,robrix/Lagrangian,policp/RXCollections
c
## Code Before: // L3TestResult.h // Created by Rob Rix on 2012-11-12. // Copyright (c) 2012 Rob Rix. All rights reserved. #import <Foundation/Foundation.h> @interface L3TestResult : NSObject +(instancetype)testResultWithName:(NSString *)name startDate:(NSDate *)startDate; @property (strong, nonatomic) L3TestResult *parent; // this really only exists to make stacks easier; nil it out on pop or you get retain cycles @property (copy, nonatomic, readonly) NSString *name; @property (strong, nonatomic, readonly) NSDate *startDate; @property (assign, nonatomic) NSTimeInterval duration; @property (assign, nonatomic) NSUInteger testCaseCount; @property (assign, nonatomic) NSUInteger assertionCount; @property (assign, nonatomic) NSUInteger assertionFailureCount; @property (assign, nonatomic) NSUInteger exceptionCount; @property (nonatomic, readonly) bool succeeded; @property (nonatomic, readonly) bool failed; @property (nonatomic, readonly) NSArray *testResults; -(void)addTestResult:(L3TestResult *)testResult; @end ## Instruction: Test results have an end date. ## Code After: // L3TestResult.h // Created by Rob Rix on 2012-11-12. // Copyright (c) 2012 Rob Rix. All rights reserved. #import <Foundation/Foundation.h> @interface L3TestResult : NSObject +(instancetype)testResultWithName:(NSString *)name startDate:(NSDate *)startDate; @property (strong, nonatomic) L3TestResult *parent; // this really only exists to make stacks easier; nil it out on pop or you get retain cycles @property (copy, nonatomic, readonly) NSString *name; @property (strong, nonatomic, readonly) NSDate *startDate; @property (strong, nonatomic) NSDate *endDate; @property (assign, nonatomic) NSTimeInterval duration; @property (assign, nonatomic) NSUInteger testCaseCount; @property (assign, nonatomic) NSUInteger assertionCount; @property (assign, nonatomic) NSUInteger assertionFailureCount; @property (assign, nonatomic) NSUInteger exceptionCount; @property (nonatomic, readonly) bool succeeded; @property (nonatomic, readonly) bool failed; @property (nonatomic, readonly) NSArray *testResults; -(void)addTestResult:(L3TestResult *)testResult; @end
// ... existing code ... @property (copy, nonatomic, readonly) NSString *name; @property (strong, nonatomic, readonly) NSDate *startDate; @property (strong, nonatomic) NSDate *endDate; @property (assign, nonatomic) NSTimeInterval duration; @property (assign, nonatomic) NSUInteger testCaseCount; @property (assign, nonatomic) NSUInteger assertionCount; // ... rest of the code ...
e9605bd92e67c7f5daf7011f871c3a9d915abe76
core/urls/base.py
core/urls/base.py
from django.conf.urls import patterns, url, include from django.conf.urls.static import static from core.views import Home, register # Use this file to import all other url from game_website import settings urlpatterns = patterns( # Examples: # url(r'^blog/', include('blog.urls')), '', url(r'^$', Home.as_view(), name='home'), url(r'^games/', include('core.urls.games')), url(r'^register/$', register, name='register'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'), ) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # Used to serve static media in a dev environment. Should be disabled in production
from django.conf.urls import patterns, url, include from django.conf.urls.static import static from core.views import Home, register # Use this file to import all other url from game_website import settings urlpatterns = patterns( # Examples: # url(r'^blog/', include('blog.urls')), '', url(r'^$', Home.as_view(), name='home'), url(r'^games/', include('core.urls.games')), url(r'^register/$', register, name='register'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': 'core:home'}, name='logout'), ) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # Used to serve static media in a dev environment. Should be disabled in production
Make logout redirect to home
Make logout redirect to home
Python
mit
joshsamara/game-website,joshsamara/game-website,joshsamara/game-website
python
## Code Before: from django.conf.urls import patterns, url, include from django.conf.urls.static import static from core.views import Home, register # Use this file to import all other url from game_website import settings urlpatterns = patterns( # Examples: # url(r'^blog/', include('blog.urls')), '', url(r'^$', Home.as_view(), name='home'), url(r'^games/', include('core.urls.games')), url(r'^register/$', register, name='register'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'), ) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # Used to serve static media in a dev environment. Should be disabled in production ## Instruction: Make logout redirect to home ## Code After: from django.conf.urls import patterns, url, include from django.conf.urls.static import static from core.views import Home, register # Use this file to import all other url from game_website import settings urlpatterns = patterns( # Examples: # url(r'^blog/', include('blog.urls')), '', url(r'^$', Home.as_view(), name='home'), url(r'^games/', include('core.urls.games')), url(r'^register/$', register, name='register'), url(r'^login/$', 'django.contrib.auth.views.login', name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': 'core:home'}, name='logout'), ) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # Used to serve static media in a dev environment. Should be disabled in production
...
    url(r'^games/', include('core.urls.games')),
    url(r'^register/$', register, name='register'),
    url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
    url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': 'core:home'}, name='logout'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
    static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Used to serve static media in a dev environment. Should be disabled in production
...
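A quick way to check the logout change above is a redirect assertion against the named URLs. The sketch below is illustrative only: it assumes the URLconf in this record is included under a `core` namespace (implied by the `'core:home'` next_page value) and that the home view answers with a 200; the test client and `reverse` calls are standard Django 1.x APIs, but the project wiring is a guess.

from django.core.urlresolvers import reverse  # Django 1.x import path
from django.test import TestCase


class LogoutRedirectTest(TestCase):
    def test_logout_redirects_to_home(self):
        # GET the named logout URL; the auth logout view logs the user out
        # and should redirect to the 'core:home' URL passed as next_page.
        response = self.client.get(reverse('core:logout'))
        self.assertRedirects(response, reverse('core:home'))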
5d97b41a7b814b078b0b7b7d930317342d0db3de
yaml_writer.py
yaml_writer.py
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with open(file_path, 'w') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with open(file_path, 'r') as f:
        return yaml.load(f.read())
import io
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with io.open(file_path, 'w', encoding='utf-8') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with io.open(file_path, 'r', encoding='utf-8') as f:
        return yaml.load(f.read())
Support python 2 with io.open
Support python 2 with io.open
Python
mit
Aalto-LeTech/a-plus-rst-tools,Aalto-LeTech/a-plus-rst-tools,Aalto-LeTech/a-plus-rst-tools
python
## Code Before:
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with open(file_path, 'w') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with open(file_path, 'r') as f:
        return yaml.load(f.read())

## Instruction:
Support python 2 with io.open

## Code After:
import io
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with io.open(file_path, 'w', encoding='utf-8') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with io.open(file_path, 'r', encoding='utf-8') as f:
        return yaml.load(f.read())
# ... existing code ...
import io
import os.path
import yaml
from sphinx.util.osutil import ensuredir
# ... modified code ...
def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with io.open(file_path, 'w', encoding='utf-8') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with io.open(file_path, 'r', encoding='utf-8') as f:
        return yaml.load(f.read())
# ... rest of the code ...
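The point of switching to io.open with an explicit encoding, as in the record above, is that the same text-mode code then handles non-ASCII content on both Python 2 and Python 3. A minimal standalone sketch of that behaviour, independent of the Sphinx and YAML helpers (file name and sample string are made up for illustration):

# -*- coding: utf-8 -*-
import io

text = u'Repositório Científico\n'  # non-ASCII sample value

# io.open behaves like Python 3's built-in open on both interpreters:
# in text mode it reads and writes unicode and does the UTF-8
# encoding/decoding itself instead of relying on the default codec.
with io.open('example.txt', 'w', encoding='utf-8') as f:
    f.write(text)

with io.open('example.txt', 'r', encoding='utf-8') as f:
    assert f.read() == text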
8e58b413801a0dbbcd3e48a5ef94201a24af7e8e
are_there_spiders/are_there_spiders/custom_storages.py
are_there_spiders/are_there_spiders/custom_storages.py
from django.contrib.staticfiles.storage import CachedFilesMixin
from pipeline.storage import PipelineMixin

from storages.backends.s3boto import S3BotoStorage


class S3PipelineStorage(PipelineMixin, CachedFilesMixin, S3BotoStorage):
    pass
import urllib
import urlparse

from django.contrib.staticfiles.storage import CachedFilesMixin
from pipeline.storage import PipelineMixin

from storages.backends.s3boto import S3BotoStorage


# CachedFilesMixin doesn't play well with Boto and S3. It over-quotes things,
# causing erratic failures. So we subclass.
# (See http://stackoverflow.com/questions/11820566/inconsistent-
# signaturedoesnotmatch-amazon-s3-with-django-pipeline-s3boto-and-st)
class PatchedCachedFilesMixin(CachedFilesMixin):
    def url(self, *a, **kw):
        s = super(PatchedCachedFilesMixin, self).url(*a, **kw)
        if isinstance(s, unicode):
            s = s.encode('utf-8', 'ignore')
        scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
        path = urllib.quote(path, '/%')
        qs = urllib.quote_plus(qs, ':&=')
        return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))


class S3PipelineStorage(PipelineMixin, PatchedCachedFilesMixin, S3BotoStorage):
    pass
Revert "Improvement to custom storage."
Revert "Improvement to custom storage." This reverts commit 6f185ac7398f30653dff9403d5ebf5539d222f4c.
Python
mit
wlonk/are_there_spiders,wlonk/are_there_spiders,wlonk/are_there_spiders
python
## Code Before:
from django.contrib.staticfiles.storage import CachedFilesMixin
from pipeline.storage import PipelineMixin

from storages.backends.s3boto import S3BotoStorage


class S3PipelineStorage(PipelineMixin, CachedFilesMixin, S3BotoStorage):
    pass

## Instruction:
Revert "Improvement to custom storage."

This reverts commit 6f185ac7398f30653dff9403d5ebf5539d222f4c.

## Code After:
import urllib
import urlparse

from django.contrib.staticfiles.storage import CachedFilesMixin
from pipeline.storage import PipelineMixin

from storages.backends.s3boto import S3BotoStorage


# CachedFilesMixin doesn't play well with Boto and S3. It over-quotes things,
# causing erratic failures. So we subclass.
# (See http://stackoverflow.com/questions/11820566/inconsistent-
# signaturedoesnotmatch-amazon-s3-with-django-pipeline-s3boto-and-st)
class PatchedCachedFilesMixin(CachedFilesMixin):
    def url(self, *a, **kw):
        s = super(PatchedCachedFilesMixin, self).url(*a, **kw)
        if isinstance(s, unicode):
            s = s.encode('utf-8', 'ignore')
        scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
        path = urllib.quote(path, '/%')
        qs = urllib.quote_plus(qs, ':&=')
        return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))


class S3PipelineStorage(PipelineMixin, PatchedCachedFilesMixin, S3BotoStorage):
    pass
# ... existing code ...
import urllib
import urlparse

from django.contrib.staticfiles.storage import CachedFilesMixin
from pipeline.storage import PipelineMixin
# ... modified code ...
from storages.backends.s3boto import S3BotoStorage


# CachedFilesMixin doesn't play well with Boto and S3. It over-quotes things,
# causing erratic failures. So we subclass.
# (See http://stackoverflow.com/questions/11820566/inconsistent-
# signaturedoesnotmatch-amazon-s3-with-django-pipeline-s3boto-and-st)
class PatchedCachedFilesMixin(CachedFilesMixin):
    def url(self, *a, **kw):
        s = super(PatchedCachedFilesMixin, self).url(*a, **kw)
        if isinstance(s, unicode):
            s = s.encode('utf-8', 'ignore')
        scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
        path = urllib.quote(path, '/%')
        qs = urllib.quote_plus(qs, ':&=')
        return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))


class S3PipelineStorage(PipelineMixin, PatchedCachedFilesMixin, S3BotoStorage):
    pass
# ... rest of the code ...
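The heart of the patched mixin above is the re-quoting of the path and query string, and that part can be exercised without S3, boto, or django-pipeline at all. A standalone Python 2 sketch of just the quoting step (the URL below is an invented example, not taken from the record):

import urllib
import urlparse


def requote(url):
    # Same normalisation as PatchedCachedFilesMixin.url(): split the URL,
    # quote the path while keeping '/' and '%', quote the query string
    # while keeping ':', '&' and '=', then reassemble the pieces.
    if isinstance(url, unicode):
        url = url.encode('utf-8', 'ignore')
    scheme, netloc, path, qs, anchor = urlparse.urlsplit(url)
    path = urllib.quote(path, '/%')
    qs = urllib.quote_plus(qs, ':&=')
    return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))


# The space in the path is percent-encoded once; the query string survives intact.
print(requote(u'https://bucket.s3.amazonaws.com/static/css/site css.css?v=1'))
# -> https://bucket.s3.amazonaws.com/static/css/site%20css.css?v=1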
ea67ca087b06347625f8116e1583fd046a75159a
providers/pt/rcaap/apps.py
providers/pt/rcaap/apps.py
from share.provider import OAIProviderAppConfig


class AppConfig(OAIProviderAppConfig):
    name = 'providers.pt.rcaap'
    version = '0.0.1'
    title = 'rcaap'
    long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
    home_page = 'http://www.rcaap.pt'
    url = 'http://www.rcaap.pt/oai'
    approved_sets = ['portugal']
from share.provider import OAIProviderAppConfig


class AppConfig(OAIProviderAppConfig):
    name = 'providers.pt.rcaap'
    version = '0.0.1'
    title = 'rcaap'
    long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
    home_page = 'http://www.rcaap.pt'
    url = 'http://www.rcaap.pt/oai'
    approved_sets = ['portugal']
    time_granularity = False
Remove time granularity from rcaap
Remove time granularity from rcaap
Python
apache-2.0
laurenbarker/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,aaxelb/SHARE,zamattiac/SHARE,aaxelb/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,laurenbarker/SHARE,zamattiac/SHARE,aaxelb/SHARE
python
## Code Before:
from share.provider import OAIProviderAppConfig


class AppConfig(OAIProviderAppConfig):
    name = 'providers.pt.rcaap'
    version = '0.0.1'
    title = 'rcaap'
    long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
    home_page = 'http://www.rcaap.pt'
    url = 'http://www.rcaap.pt/oai'
    approved_sets = ['portugal']

## Instruction:
Remove time granularity from rcaap

## Code After:
from share.provider import OAIProviderAppConfig


class AppConfig(OAIProviderAppConfig):
    name = 'providers.pt.rcaap'
    version = '0.0.1'
    title = 'rcaap'
    long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
    home_page = 'http://www.rcaap.pt'
    url = 'http://www.rcaap.pt/oai'
    approved_sets = ['portugal']
    time_granularity = False
// ... existing code ...
    home_page = 'http://www.rcaap.pt'
    url = 'http://www.rcaap.pt/oai'
    approved_sets = ['portugal']
    time_granularity = False
// ... rest of the code ...
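In OAI-PMH harvesting, a granularity setting typically decides whether the from/until harvest window is sent as a full UTC timestamp or as a plain date, and presumably that is what time_granularity = False switches off for this repository; the record itself does not show the SHARE harvester code, so the helper below is a made-up illustration of the difference, not the project's actual implementation:

from datetime import datetime


def oai_list_records_url(base_url, start, end, time_granularity=True,
                         metadata_prefix='oai_dc'):
    # Hypothetical helper: format the from/until window the way an OAI-PMH
    # harvester would, with full timestamps or date-only values depending on
    # the granularity the repository supports.
    fmt = '%Y-%m-%dT%H:%M:%SZ' if time_granularity else '%Y-%m-%d'
    return ('{base}?verb=ListRecords&metadataPrefix={prefix}&from={start}&until={end}'
            .format(base=base_url, prefix=metadata_prefix,
                    start=start.strftime(fmt), end=end.strftime(fmt)))


start = datetime(2016, 7, 1)
end = datetime(2016, 7, 2)

# Date-only window, as a repository that rejects full timestamps would require:
print(oai_list_records_url('http://www.rcaap.pt/oai', start, end,
                           time_granularity=False))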
5574f0f01ecb4f38cad51eaa595de819cf8d7f4e
dependencies-overview/src/main/kotlin/com/vgaidarji/dependencies/overview/DependenciesOverviewTask.kt
dependencies-overview/src/main/kotlin/com/vgaidarji/dependencies/overview/DependenciesOverviewTask.kt
package com.vgaidarji.dependencies.overview

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction

open class DependenciesOverviewTask : DefaultTask() {

    override fun getDescription(): String {
        return "Generates project dependencies overview table from project dependencies"
    }

    override fun getGroup(): String {
        return "documentation"
    }

    @TaskAction
    fun generate() {
        println("---DependenciesOverviewTask---")
    }
}
package com.vgaidarji.dependencies.overview

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction

open class DependenciesOverviewTask : DefaultTask() {

    init {
        description = "Generates project dependencies overview table from project dependencies"
        group = "documentation"
    }

    @TaskAction
    fun generate() {
        println("---DependenciesOverviewTask---")
    }
}
Move task configuration to init block
Move task configuration to init block
Kotlin
apache-2.0
vgaidarji/dependencies-overview,vgaidarji/dependencies-overview,vgaidarji/dependencies-overview
kotlin
## Code Before:
package com.vgaidarji.dependencies.overview

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction

open class DependenciesOverviewTask : DefaultTask() {

    override fun getDescription(): String {
        return "Generates project dependencies overview table from project dependencies"
    }

    override fun getGroup(): String {
        return "documentation"
    }

    @TaskAction
    fun generate() {
        println("---DependenciesOverviewTask---")
    }
}

## Instruction:
Move task configuration to init block

## Code After:
package com.vgaidarji.dependencies.overview

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction

open class DependenciesOverviewTask : DefaultTask() {

    init {
        description = "Generates project dependencies overview table from project dependencies"
        group = "documentation"
    }

    @TaskAction
    fun generate() {
        println("---DependenciesOverviewTask---")
    }
}
# ... existing code ...
open class DependenciesOverviewTask : DefaultTask() {

    init {
        description = "Generates project dependencies overview table from project dependencies"
        group = "documentation"
    }

    @TaskAction
# ... rest of the code ...
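A task configured in its init block, as above, picks up its description and group the moment it is instantiated, so registering it needs no extra configuration. A minimal Gradle Kotlin DSL sketch of that registration — the task name "dependenciesOverview" and the build-script placement are assumptions, not taken from this record:

// build.gradle.kts — illustrative only; assumes the plugin classes are on the build classpath.
import com.vgaidarji.dependencies.overview.DependenciesOverviewTask

// No configuration block is needed: description and group are already set in the
// task's init block, so the task appears under the "documentation" group of `gradle tasks`.
tasks.register<DependenciesOverviewTask>("dependenciesOverview")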
44479e90bbcd4f96330dc726c41069d7c93965b2
src/com/edinarobotics/zephyr/Zephyr.java
src/com/edinarobotics/zephyr/Zephyr.java
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/

package com.edinarobotics.zephyr;

import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.SimpleRobot;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the SimpleRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class Zephyr extends SimpleRobot {

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        Joystick joystick = new Joystick(1);
        Components components = Components.getInstance();
        while(this.isOperatorControl()&&this.isEnabled()){
            components.leftJaguar.set(joystick.getRawAxis(2));
            components.rightJaguar.set(joystick.getRawAxis(5));
        }
        components.leftJaguar.set(0);
        components.rightJaguar.set(0);
    }
}
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/

package com.edinarobotics.zephyr;

import com.edinarobotics.utils.gamepad.Gamepad;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.SimpleRobot;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the SimpleRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class Zephyr extends SimpleRobot {

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        Gamepad gamepad1 = new Gamepad(1);
        Components components = Components.getInstance();
        while(this.isOperatorControl()&&this.isEnabled()){
            components.leftJaguar.set(gamepad1.getLeftY());
            components.rightJaguar.set(gamepad1.getRightY());
        }
        components.leftJaguar.set(0);
        components.rightJaguar.set(0);
    }
}
Change teleop to drive with the new Gamepad class.
Change teleop to drive with the new Gamepad class.
Java
bsd-3-clause
TheGreenMachine/Zephyr-Java
java
## Code Before:
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/

package com.edinarobotics.zephyr;

import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.SimpleRobot;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the SimpleRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class Zephyr extends SimpleRobot {

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        Joystick joystick = new Joystick(1);
        Components components = Components.getInstance();
        while(this.isOperatorControl()&&this.isEnabled()){
            components.leftJaguar.set(joystick.getRawAxis(2));
            components.rightJaguar.set(joystick.getRawAxis(5));
        }
        components.leftJaguar.set(0);
        components.rightJaguar.set(0);
    }
}

## Instruction:
Change teleop to drive with the new Gamepad class.

## Code After:
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/

package com.edinarobotics.zephyr;

import com.edinarobotics.utils.gamepad.Gamepad;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.SimpleRobot;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the SimpleRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class Zephyr extends SimpleRobot {

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        Gamepad gamepad1 = new Gamepad(1);
        Components components = Components.getInstance();
        while(this.isOperatorControl()&&this.isEnabled()){
            components.leftJaguar.set(gamepad1.getLeftY());
            components.rightJaguar.set(gamepad1.getRightY());
        }
        components.leftJaguar.set(0);
        components.rightJaguar.set(0);
    }
}
...
package com.edinarobotics.zephyr;

import com.edinarobotics.utils.gamepad.Gamepad;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.SimpleRobot;
...
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        Gamepad gamepad1 = new Gamepad(1);
        Components components = Components.getInstance();
        while(this.isOperatorControl()&&this.isEnabled()){
            components.leftJaguar.set(gamepad1.getLeftY());
            components.rightJaguar.set(gamepad1.getRightY());
        }
        components.leftJaguar.set(0);
        components.rightJaguar.set(0);
...
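A small refinement that often follows a change like the one above is a deadband around the stick values, so the Jaguars do not creep when the sticks are at rest. The sketch below only uses what the record itself shows — the Gamepad constructor, getLeftY()/getRightY(), and the Components singleton with its leftJaguar/rightJaguar speed controllers; the helper class, its name, and the deadband value are assumptions for illustration, not part of the team's code:

package com.edinarobotics.zephyr;

import com.edinarobotics.utils.gamepad.Gamepad;

public class TankDriveHelper {

    // Stick values closer to zero than this are treated as "at rest".
    private static final double DEADBAND = 0.05;

    /** Returns 0 for small stick values, otherwise the raw value. */
    public static double deadband(double value) {
        return Math.abs(value) < DEADBAND ? 0.0 : value;
    }

    /** Reads both stick axes and applies the deadband before driving. */
    public static void drive(Gamepad gamepad, Components components) {
        components.leftJaguar.set(deadband(gamepad.getLeftY()));
        components.rightJaguar.set(deadband(gamepad.getRightY()));
    }
}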