column         type            min        max
commit         stringlengths   40         40
old_file       stringlengths   4          234
new_file       stringlengths   4          234
old_contents   stringlengths   10         3.01k
new_contents   stringlengths   19         3.38k
subject        stringlengths   16         736
message        stringlengths   17         2.63k
lang           stringclasses   4 values
license        stringclasses   13 values
repos          stringlengths   5          82.6k
config         stringclasses   4 values
content        stringlengths   134        4.41k
fuzzy_diff     stringlengths   29         3.44k
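Read together, the schema describes one record per single-file commit, where `content` is the `old_contents`, `subject`, and `new_contents` fields joined under "## Code Before / ## Instruction / ## Code After" headers, and `fuzzy_diff` is an abbreviated diff with "..." context markers. The sketch below is only an illustration of that record structure: it assumes the rows are exported as JSON-lines, and the `CommitRecord` class, the `load_records` helper, and the `data.jsonl` path are hypothetical names, not part of the dataset.

```python
import json
from dataclasses import dataclass


@dataclass
class CommitRecord:
    """One dataset row: a single-file code change plus fields derived from it."""
    commit: str        # 40-character commit hash
    old_file: str      # file path before the change
    new_file: str      # file path after the change
    old_contents: str  # file contents before the change
    new_contents: str  # file contents after the change
    subject: str       # one-line commit subject
    message: str       # full commit message
    lang: str          # language label (4 classes)
    license: str       # repository license (13 classes)
    repos: str         # comma-separated repositories containing the commit
    config: str        # configuration/split label (4 classes)
    content: str       # "## Code Before / ## Instruction / ## Code After" text
    fuzzy_diff: str    # abbreviated diff with "..." context markers


def load_records(path="data.jsonl"):
    """Yield CommitRecord objects from a hypothetical JSON-lines export."""
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            yield CommitRecord(**json.loads(line))
```

Under that assumption, `next(load_records())` would return the first record shown below (the stagehand provider change).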
fd054790ce32c3918f6edbe824540c09d7efce59
stagehand/providers/__init__.py
stagehand/providers/__init__.py
import asyncio from ..utils import load_plugins, invoke_plugins from .base import ProviderError plugins, broken_plugins = load_plugins('providers', ['thetvdb', 'tvrage']) @asyncio.coroutine def start(manager): """ Called when the manager is starting. """ yield from invoke_plugins(plugins, 'start', manager) for name, error in broken_plugins.items(): log.warning('failed to load provider plugin %s: %s', name, error)
import asyncio from ..utils import load_plugins, invoke_plugins from .base import ProviderError plugins, broken_plugins = load_plugins('providers', ['thetvdb']) @asyncio.coroutine def start(manager): """ Called when the manager is starting. """ yield from invoke_plugins(plugins, 'start', manager) for name, error in broken_plugins.items(): log.warning('failed to load provider plugin %s: %s', name, error)
Remove tvrage from active providers as site is shut down
Remove tvrage from active providers as site is shut down
Python
mit
jtackaberry/stagehand,jtackaberry/stagehand
python
## Code Before: import asyncio from ..utils import load_plugins, invoke_plugins from .base import ProviderError plugins, broken_plugins = load_plugins('providers', ['thetvdb', 'tvrage']) @asyncio.coroutine def start(manager): """ Called when the manager is starting. """ yield from invoke_plugins(plugins, 'start', manager) for name, error in broken_plugins.items(): log.warning('failed to load provider plugin %s: %s', name, error) ## Instruction: Remove tvrage from active providers as site is shut down ## Code After: import asyncio from ..utils import load_plugins, invoke_plugins from .base import ProviderError plugins, broken_plugins = load_plugins('providers', ['thetvdb']) @asyncio.coroutine def start(manager): """ Called when the manager is starting. """ yield from invoke_plugins(plugins, 'start', manager) for name, error in broken_plugins.items(): log.warning('failed to load provider plugin %s: %s', name, error)
... from ..utils import load_plugins, invoke_plugins from .base import ProviderError plugins, broken_plugins = load_plugins('providers', ['thetvdb']) @asyncio.coroutine def start(manager): ...
c57fd21ca62f9217a943cec5111b64403e968ab5
kimochi/scripts/initializedb.py
kimochi/scripts/initializedb.py
import os import sys import transaction from sqlalchemy import engine_from_config from pyramid.paster import ( get_appsettings, setup_logging, ) from pyramid.scripts.common import parse_vars from ..models import ( DBSession, Base, ) def usage(argv): cmd = os.path.basename(argv[0]) print('usage: %s <config_uri> [var=value]\n' '(example: "%s development.ini")' % (cmd, cmd)) sys.exit(1) def main(argv=sys.argv): if len(argv) < 2: usage(argv) config_uri = argv[1] options = parse_vars(argv[2:]) setup_logging(config_uri) settings = get_appsettings(config_uri, options=options) engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) Base.metadata.create_all(engine)
import os import sys import transaction from sqlalchemy import engine_from_config from pyramid.paster import ( get_appsettings, setup_logging, ) from pyramid.scripts.common import parse_vars from ..models import ( DBSession, Base, User, ) def usage(argv): cmd = os.path.basename(argv[0]) print('usage: %s <config_uri> [var=value]\n' '(example: "%s development.ini")' % (cmd, cmd)) sys.exit(1) def main(argv=sys.argv): if len(argv) < 2: usage(argv) config_uri = argv[1] options = parse_vars(argv[2:]) setup_logging(config_uri) settings = get_appsettings(config_uri, options=options) engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) Base.metadata.create_all(engine) with transaction.manager: DBSession.add(User(email='[email protected]', password='test', admin=True))
Add temporary default admin user
Add temporary default admin user
Python
mit
matslindh/kimochi,matslindh/kimochi
python
## Code Before: import os import sys import transaction from sqlalchemy import engine_from_config from pyramid.paster import ( get_appsettings, setup_logging, ) from pyramid.scripts.common import parse_vars from ..models import ( DBSession, Base, ) def usage(argv): cmd = os.path.basename(argv[0]) print('usage: %s <config_uri> [var=value]\n' '(example: "%s development.ini")' % (cmd, cmd)) sys.exit(1) def main(argv=sys.argv): if len(argv) < 2: usage(argv) config_uri = argv[1] options = parse_vars(argv[2:]) setup_logging(config_uri) settings = get_appsettings(config_uri, options=options) engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) Base.metadata.create_all(engine) ## Instruction: Add temporary default admin user ## Code After: import os import sys import transaction from sqlalchemy import engine_from_config from pyramid.paster import ( get_appsettings, setup_logging, ) from pyramid.scripts.common import parse_vars from ..models import ( DBSession, Base, User, ) def usage(argv): cmd = os.path.basename(argv[0]) print('usage: %s <config_uri> [var=value]\n' '(example: "%s development.ini")' % (cmd, cmd)) sys.exit(1) def main(argv=sys.argv): if len(argv) < 2: usage(argv) config_uri = argv[1] options = parse_vars(argv[2:]) setup_logging(config_uri) settings = get_appsettings(config_uri, options=options) engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) Base.metadata.create_all(engine) with transaction.manager: DBSession.add(User(email='[email protected]', password='test', admin=True))
// ... existing code ... from ..models import ( DBSession, Base, User, ) def usage(argv): // ... modified code ... engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) Base.metadata.create_all(engine) with transaction.manager: DBSession.add(User(email='[email protected]', password='test', admin=True)) // ... rest of the code ...
5b73f28162cbbadcec295fe773cace920ea7f1c6
test/com/twu/biblioteca/ExampleTest.java
test/com/twu/biblioteca/ExampleTest.java
package com.twu.biblioteca; import org.junit.Test; import static org.junit.Assert.assertEquals; public class ExampleTest { @Test public void test() { assertEquals(1, 1); } }
package com.twu.biblioteca; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import static org.junit.Assert.assertEquals; public class ExampleTest { private ByteArrayOutputStream outStream = new ByteArrayOutputStream(); @Before public void setupStreams() { System.setOut(new PrintStream(outStream)); } @After public void tearDown() { System.setOut(null); } @Test public void testWelcomeMessage() { String expected = "Welcome to Biblioteca.\n"; BibliotecaApp.main(null); assertEquals(expected, outStream.toString()); } }
Test for welcome message (failing)
Test for welcome message (failing)
Java
apache-2.0
watsonarw/twu-biblioteca-andrewwatson
java
## Code Before: package com.twu.biblioteca; import org.junit.Test; import static org.junit.Assert.assertEquals; public class ExampleTest { @Test public void test() { assertEquals(1, 1); } } ## Instruction: Test for welcome message (failing) ## Code After: package com.twu.biblioteca; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import static org.junit.Assert.assertEquals; public class ExampleTest { private ByteArrayOutputStream outStream = new ByteArrayOutputStream(); @Before public void setupStreams() { System.setOut(new PrintStream(outStream)); } @After public void tearDown() { System.setOut(null); } @Test public void testWelcomeMessage() { String expected = "Welcome to Biblioteca.\n"; BibliotecaApp.main(null); assertEquals(expected, outStream.toString()); } }
// ... existing code ... package com.twu.biblioteca; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import static org.junit.Assert.assertEquals; public class ExampleTest { private ByteArrayOutputStream outStream = new ByteArrayOutputStream(); @Before public void setupStreams() { System.setOut(new PrintStream(outStream)); } @After public void tearDown() { System.setOut(null); } @Test public void testWelcomeMessage() { String expected = "Welcome to Biblioteca.\n"; BibliotecaApp.main(null); assertEquals(expected, outStream.toString()); } } // ... rest of the code ...
0438825672a407eb30bff49e03dac89a0534f28a
minimax.py
minimax.py
class Heuristic: def __init__(self, color): self.color = color def heuristic(self, board, color): raise NotImplementedError('Dont override this class') def eval(self, vector): pass class Minimax: def __init__(self, me, challenger): self.me = me self.challenger = challenger def heuristic(self, board, color): if color == self.color_me: return self.me.heuristic(board, color) else return self.challenger.heuristic(board, color)
class Heuristic: def __init__(self, color): self.color = color def heuristic(self, board, color): raise NotImplementedError('Dont override this class') def eval(self, vector): raise NotImplementedError('Dont override this class') class Minimax: def __init__(self, me, challenger): self.me = me self.challenger = challenger def heuristic(self, board, color): if color == self.color_me: return self.me.heuristic(board, color) else return self.challenger.heuristic(board, color) def calculate_min_or_max(self, vector_values, color): if color == self.me.color: return self.me.eval(vector_values) else: return self.challenger.eval(vector_values)
Create in MinMax the calculate_min_max
Create in MinMax the calculate_min_max
Python
apache-2.0
frila/agente-minimax
python
## Code Before: class Heuristic: def __init__(self, color): self.color = color def heuristic(self, board, color): raise NotImplementedError('Dont override this class') def eval(self, vector): pass class Minimax: def __init__(self, me, challenger): self.me = me self.challenger = challenger def heuristic(self, board, color): if color == self.color_me: return self.me.heuristic(board, color) else return self.challenger.heuristic(board, color) ## Instruction: Create in MinMax the calculate_min_max ## Code After: class Heuristic: def __init__(self, color): self.color = color def heuristic(self, board, color): raise NotImplementedError('Dont override this class') def eval(self, vector): raise NotImplementedError('Dont override this class') class Minimax: def __init__(self, me, challenger): self.me = me self.challenger = challenger def heuristic(self, board, color): if color == self.color_me: return self.me.heuristic(board, color) else return self.challenger.heuristic(board, color) def calculate_min_or_max(self, vector_values, color): if color == self.me.color: return self.me.eval(vector_values) else: return self.challenger.eval(vector_values)
... raise NotImplementedError('Dont override this class') def eval(self, vector): raise NotImplementedError('Dont override this class') class Minimax: def __init__(self, me, challenger): ... return self.me.heuristic(board, color) else return self.challenger.heuristic(board, color) def calculate_min_or_max(self, vector_values, color): if color == self.me.color: return self.me.eval(vector_values) else: return self.challenger.eval(vector_values) ...
061bf34805bbe88b25f9084b9f99a7096689c5d8
library/strings_format.h
library/strings_format.h
// Tinyformat has a number of non-annotated switch fallthrough cases #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" #endif #include "dependencies/tinyformat/tinyformat.h" #ifdef __GNUC__ #pragma GCC diagnostic pop #endif #include "library/strings.h" namespace OpenApoc { template <typename... Args> static UString format(const UString &fmt, Args &&... args) { return tfm::format(fmt.cStr(), std::forward<Args>(args)...); } UString tr(const UString &str, const UString domain = "ufo_string"); } // namespace OpenApoc
namespace OpenApoc { template <typename... Args> static UString format(const UString &fmt, Args &&... args) { return fmt::sprintf(fmt.str(), std::forward<Args>(args)...); } UString tr(const UString &str, const UString domain = "ufo_string"); } // namespace OpenApoc template <> struct fmt::formatter<OpenApoc::UString> : formatter<std::string> { template <typename FormatContext> auto format(const OpenApoc::UString &s, FormatContext &ctx) { return formatter<std::string>::format(s.str(), ctx); } };
Use fmtlib's sprintf instead of tinyformat
Use fmtlib's sprintf instead of tinyformat TODO: Move to the indexed formatting strings for fmtlib
C
mit
Istrebitel/OpenApoc,pmprog/OpenApoc,pmprog/OpenApoc,steveschnepp/OpenApoc,Istrebitel/OpenApoc,steveschnepp/OpenApoc
c
## Code Before: // Tinyformat has a number of non-annotated switch fallthrough cases #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" #endif #include "dependencies/tinyformat/tinyformat.h" #ifdef __GNUC__ #pragma GCC diagnostic pop #endif #include "library/strings.h" namespace OpenApoc { template <typename... Args> static UString format(const UString &fmt, Args &&... args) { return tfm::format(fmt.cStr(), std::forward<Args>(args)...); } UString tr(const UString &str, const UString domain = "ufo_string"); } // namespace OpenApoc ## Instruction: Use fmtlib's sprintf instead of tinyformat TODO: Move to the indexed formatting strings for fmtlib ## Code After: namespace OpenApoc { template <typename... Args> static UString format(const UString &fmt, Args &&... args) { return fmt::sprintf(fmt.str(), std::forward<Args>(args)...); } UString tr(const UString &str, const UString domain = "ufo_string"); } // namespace OpenApoc template <> struct fmt::formatter<OpenApoc::UString> : formatter<std::string> { template <typename FormatContext> auto format(const OpenApoc::UString &s, FormatContext &ctx) { return formatter<std::string>::format(s.str(), ctx); } };
... namespace OpenApoc { template <typename... Args> static UString format(const UString &fmt, Args &&... args) { return fmt::sprintf(fmt.str(), std::forward<Args>(args)...); } UString tr(const UString &str, const UString domain = "ufo_string"); } // namespace OpenApoc template <> struct fmt::formatter<OpenApoc::UString> : formatter<std::string> { template <typename FormatContext> auto format(const OpenApoc::UString &s, FormatContext &ctx) { return formatter<std::string>::format(s.str(), ctx); } }; ...
2abb0ff8d8f57ca1ae05e0abfc17bdcbbc758e19
main.py
main.py
import phiface sc = phiface.Context() yloc = 30 for weight in [1, 3, 6, 10]: xloc = 30 A = phiface.AGlyph(x=xloc, y=yloc) xloc += A.width() + 40 E = phiface.EGlyph(x=xloc, y=yloc) xloc += E.width() + 20 I = phiface.IGlyph(x=xloc, y=yloc) xloc += I.width() + 20 T = phiface.TGlyph(x=xloc, y=yloc) A.w = E.w = I.w = T.w = weight sc.draw([A, E, I, T]) yloc += A.capHeight() + 20 sc.write("output.png")
import phiface sc = phiface.Context() yloc = 30 for weight in [1, 3, 6, 10]: xloc = 30 A = phiface.AGlyph(x=xloc, y=yloc) xloc += A.width() + 40 E = phiface.EGlyph(x=xloc, y=yloc) xloc += E.width() + 20 I = phiface.IGlyph(x=xloc, y=yloc) xloc += I.width() + 20 T = phiface.TGlyph(x=xloc, y=yloc) A.w = E.w = I.w = T.w = (weight * (A.capHeight() / 150.0)) sc.draw([A, E, I, T]) yloc += A.capHeight() + 20 sc.write("output.png")
Change weight rendering based on capHeight
Change weight rendering based on capHeight
Python
bsd-2-clause
hortont424/phiface
python
## Code Before: import phiface sc = phiface.Context() yloc = 30 for weight in [1, 3, 6, 10]: xloc = 30 A = phiface.AGlyph(x=xloc, y=yloc) xloc += A.width() + 40 E = phiface.EGlyph(x=xloc, y=yloc) xloc += E.width() + 20 I = phiface.IGlyph(x=xloc, y=yloc) xloc += I.width() + 20 T = phiface.TGlyph(x=xloc, y=yloc) A.w = E.w = I.w = T.w = weight sc.draw([A, E, I, T]) yloc += A.capHeight() + 20 sc.write("output.png") ## Instruction: Change weight rendering based on capHeight ## Code After: import phiface sc = phiface.Context() yloc = 30 for weight in [1, 3, 6, 10]: xloc = 30 A = phiface.AGlyph(x=xloc, y=yloc) xloc += A.width() + 40 E = phiface.EGlyph(x=xloc, y=yloc) xloc += E.width() + 20 I = phiface.IGlyph(x=xloc, y=yloc) xloc += I.width() + 20 T = phiface.TGlyph(x=xloc, y=yloc) A.w = E.w = I.w = T.w = (weight * (A.capHeight() / 150.0)) sc.draw([A, E, I, T]) yloc += A.capHeight() + 20 sc.write("output.png")
# ... existing code ... xloc += I.width() + 20 T = phiface.TGlyph(x=xloc, y=yloc) A.w = E.w = I.w = T.w = (weight * (A.capHeight() / 150.0)) sc.draw([A, E, I, T]) # ... rest of the code ...
b5a53221c7ea9c9c0d2f90286594edb8a65b430b
core/src/main/java/org/javarosa/core/model/instance/utils/FormLoadingUtils.java
core/src/main/java/org/javarosa/core/model/instance/utils/FormLoadingUtils.java
package org.javarosa.core.model.instance.utils; import org.javarosa.core.model.instance.FormInstance; import org.javarosa.core.model.instance.TreeElement; import org.javarosa.xml.ElementParser; import org.javarosa.xml.TreeElementParser; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.IOException; import java.io.InputStream; /** * Collection of static form loading methods * * @author Phillip Mates */ public class FormLoadingUtils { public static FormInstance loadFormInstance(String formFilepath) throws InvalidStructureException, IOException { TreeElement root = xmlToTreeElement(formFilepath); return new FormInstance(root, null); } public static TreeElement xmlToTreeElement(String xmlFilepath) throws InvalidStructureException, IOException { InputStream is = System.class.getResourceAsStream(xmlFilepath); TreeElementParser parser = new TreeElementParser(ElementParser.instantiateParser(is), 0, "instance"); try { return parser.parse(); } catch (XmlPullParserException e) { throw new IOException(e.getMessage()); } catch (UnfullfilledRequirementsException e) { throw new IOException(e.getMessage()); } } }
package org.javarosa.core.model.instance.utils; import org.javarosa.core.model.instance.FormInstance; import org.javarosa.core.model.instance.TreeElement; import org.javarosa.xml.ElementParser; import org.javarosa.xml.TreeElementParser; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.IOException; import java.io.InputStream; /** * Collection of static form loading methods * * @author Phillip Mates */ public class FormLoadingUtils { public static FormInstance loadFormInstance(String formFilepath) throws InvalidStructureException, IOException { TreeElement root = xmlToTreeElement(formFilepath); return new FormInstance(root, null); } public static TreeElement xmlToTreeElement(String xmlFilepath) throws InvalidStructureException, IOException { InputStream is = FormLoadingUtils.class.getResourceAsStream(xmlFilepath); TreeElementParser parser = new TreeElementParser(ElementParser.instantiateParser(is), 0, "instance"); try { return parser.parse(); } catch (XmlPullParserException e) { throw new IOException(e.getMessage()); } catch (UnfullfilledRequirementsException e) { throw new IOException(e.getMessage()); } } }
Fix resource loading on android
Fix resource loading on android
Java
apache-2.0
dimagi/commcare,dimagi/commcare-core,dimagi/commcare,dimagi/commcare,dimagi/commcare-core,dimagi/commcare-core
java
## Code Before: package org.javarosa.core.model.instance.utils; import org.javarosa.core.model.instance.FormInstance; import org.javarosa.core.model.instance.TreeElement; import org.javarosa.xml.ElementParser; import org.javarosa.xml.TreeElementParser; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.IOException; import java.io.InputStream; /** * Collection of static form loading methods * * @author Phillip Mates */ public class FormLoadingUtils { public static FormInstance loadFormInstance(String formFilepath) throws InvalidStructureException, IOException { TreeElement root = xmlToTreeElement(formFilepath); return new FormInstance(root, null); } public static TreeElement xmlToTreeElement(String xmlFilepath) throws InvalidStructureException, IOException { InputStream is = System.class.getResourceAsStream(xmlFilepath); TreeElementParser parser = new TreeElementParser(ElementParser.instantiateParser(is), 0, "instance"); try { return parser.parse(); } catch (XmlPullParserException e) { throw new IOException(e.getMessage()); } catch (UnfullfilledRequirementsException e) { throw new IOException(e.getMessage()); } } } ## Instruction: Fix resource loading on android ## Code After: package org.javarosa.core.model.instance.utils; import org.javarosa.core.model.instance.FormInstance; import org.javarosa.core.model.instance.TreeElement; import org.javarosa.xml.ElementParser; import org.javarosa.xml.TreeElementParser; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.IOException; import java.io.InputStream; /** * Collection of static form loading methods * * @author Phillip Mates */ public class FormLoadingUtils { public static FormInstance loadFormInstance(String formFilepath) throws InvalidStructureException, IOException { TreeElement root = xmlToTreeElement(formFilepath); return new FormInstance(root, null); } public static TreeElement xmlToTreeElement(String xmlFilepath) throws InvalidStructureException, IOException { InputStream is = FormLoadingUtils.class.getResourceAsStream(xmlFilepath); TreeElementParser parser = new TreeElementParser(ElementParser.instantiateParser(is), 0, "instance"); try { return parser.parse(); } catch (XmlPullParserException e) { throw new IOException(e.getMessage()); } catch (UnfullfilledRequirementsException e) { throw new IOException(e.getMessage()); } } }
// ... existing code ... public static TreeElement xmlToTreeElement(String xmlFilepath) throws InvalidStructureException, IOException { InputStream is = FormLoadingUtils.class.getResourceAsStream(xmlFilepath); TreeElementParser parser = new TreeElementParser(ElementParser.instantiateParser(is), 0, "instance"); try { // ... rest of the code ...
26bd5e00cf30446860438cc5796ec348aecf7e2b
product_configurator/models/stock.py
product_configurator/models/stock.py
from odoo import models, fields class StockMove(models.Model): _inherit = 'stock.move' product_id = fields.Many2one(domain=[('config_ok', '=', False)])
from ast import literal_eval from odoo import models, fields class StockMove(models.Model): _inherit = 'stock.move' def _get_product_domain(self): if literal_eval(self.env['ir.config_parameter'].sudo().get_param('product_configurator.product_selectable', default='False')): return [] else: return [('config_ok', '=', False)] product_id = fields.Many2one(domain=_get_product_domain)
Put configurable product in Picking list
Put configurable product in Picking list
Python
agpl-3.0
microcom/odoo-product-configurator,microcom/odoo-product-configurator,microcom/odoo-product-configurator
python
## Code Before: from odoo import models, fields class StockMove(models.Model): _inherit = 'stock.move' product_id = fields.Many2one(domain=[('config_ok', '=', False)]) ## Instruction: Put configurable product in Picking list ## Code After: from ast import literal_eval from odoo import models, fields class StockMove(models.Model): _inherit = 'stock.move' def _get_product_domain(self): if literal_eval(self.env['ir.config_parameter'].sudo().get_param('product_configurator.product_selectable', default='False')): return [] else: return [('config_ok', '=', False)] product_id = fields.Many2one(domain=_get_product_domain)
// ... existing code ... from ast import literal_eval from odoo import models, fields // ... modified code ... class StockMove(models.Model): _inherit = 'stock.move' def _get_product_domain(self): if literal_eval(self.env['ir.config_parameter'].sudo().get_param('product_configurator.product_selectable', default='False')): return [] else: return [('config_ok', '=', False)] product_id = fields.Many2one(domain=_get_product_domain) // ... rest of the code ...
05cb698d45ce4e33e2f4bfdc38f9633083a284a7
test_project/project_specific/generic_channel_example.py
test_project/project_specific/generic_channel_example.py
import autocomplete_light from models import Contact, Address class MyGenericChannel(autocomplete_light.GenericChannelBase): def get_querysets(self): return { Contact: Contact.objects.all(), Address: Address.objects.all(), } def order_results(self, results): if results.model == Address: return results.order_by('street') elif results.model == Contact: return results.order_by('name') autocomplete_light.register(MyGenericChannel)
import autocomplete_light from models import Contact, Address class MyGenericChannel(autocomplete_light.GenericChannelBase): def get_querysets(self): return { Contact: Contact.objects.all(), Address: Address.objects.all(), } def order_results(self, results): if results.model == Address: return results.order_by('street') elif results.model == Contact: return results.order_by('name') def query_filter(self, results): q = self.request.GET.get('q', None) if q: if results.model == Address: results = results.filter(street__icontains=q) elif results.model == Contact: results = results.filter(name__icontains=q) return results autocomplete_light.register(MyGenericChannel)
Implement query_filter for MyGenericChannel, because it should search by something other than search_name in the case of Address
Implement query_filter for MyGenericChannel, because it should search by something other than search_name in the case of Address
Python
mit
Eraldo/django-autocomplete-light,spookylukey/django-autocomplete-light,Perkville/django-autocomplete-light,jonashaag/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Perkville/django-autocomplete-light,Visgean/django-autocomplete-light,dsanders11/django-autocomplete-light,Visgean/django-autocomplete-light,jonashaag/django-autocomplete-light,Eraldo/django-autocomplete-light,spookylukey/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,blueyed/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Visgean/django-autocomplete-light,shubhamdipt/django-autocomplete-light,yourlabs/django-autocomplete-light,jonashaag/django-autocomplete-light,Perkville/django-autocomplete-light,Perkville/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,Visgean/django-autocomplete-light,luzfcb/django-autocomplete-light,luzfcb/django-autocomplete-light,Eraldo/django-autocomplete-light,blueyed/django-autocomplete-light,blueyed/django-autocomplete-light,dsanders11/django-autocomplete-light,spookylukey/django-autocomplete-light,Eraldo/django-autocomplete-light
python
## Code Before: import autocomplete_light from models import Contact, Address class MyGenericChannel(autocomplete_light.GenericChannelBase): def get_querysets(self): return { Contact: Contact.objects.all(), Address: Address.objects.all(), } def order_results(self, results): if results.model == Address: return results.order_by('street') elif results.model == Contact: return results.order_by('name') autocomplete_light.register(MyGenericChannel) ## Instruction: Implement query_filter for MyGenericChannel, because it should search by something other than search_name in the case of Address ## Code After: import autocomplete_light from models import Contact, Address class MyGenericChannel(autocomplete_light.GenericChannelBase): def get_querysets(self): return { Contact: Contact.objects.all(), Address: Address.objects.all(), } def order_results(self, results): if results.model == Address: return results.order_by('street') elif results.model == Contact: return results.order_by('name') def query_filter(self, results): q = self.request.GET.get('q', None) if q: if results.model == Address: results = results.filter(street__icontains=q) elif results.model == Contact: results = results.filter(name__icontains=q) return results autocomplete_light.register(MyGenericChannel)
# ... existing code ... elif results.model == Contact: return results.order_by('name') def query_filter(self, results): q = self.request.GET.get('q', None) if q: if results.model == Address: results = results.filter(street__icontains=q) elif results.model == Contact: results = results.filter(name__icontains=q) return results autocomplete_light.register(MyGenericChannel) # ... rest of the code ...
8318bae21bd5cb716a4cbf2cd2dfe46ea8cadbcf
manage.py
manage.py
import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings') os.environ.setdefault('DJANGO_CONFIGURATION', 'Development') import dotenv dotenv.read_dotenv('.env') from configurations.management import execute_from_command_line execute_from_command_line(sys.argv)
import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings') os.environ.setdefault('DJANGO_CONFIGURATION', 'Development') if os.environ['DJANGO_CONFIGURATION'] == 'Development': import dotenv dotenv.read_dotenv('.env') from configurations.management import execute_from_command_line execute_from_command_line(sys.argv)
Hide .env behind a development environment.
Hide .env behind a development environment.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
python
## Code Before: import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings') os.environ.setdefault('DJANGO_CONFIGURATION', 'Development') import dotenv dotenv.read_dotenv('.env') from configurations.management import execute_from_command_line execute_from_command_line(sys.argv) ## Instruction: Hide .env behind a development environment. ## Code After: import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings') os.environ.setdefault('DJANGO_CONFIGURATION', 'Development') if os.environ['DJANGO_CONFIGURATION'] == 'Development': import dotenv dotenv.read_dotenv('.env') from configurations.management import execute_from_command_line execute_from_command_line(sys.argv)
# ... existing code ... os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings') os.environ.setdefault('DJANGO_CONFIGURATION', 'Development') if os.environ['DJANGO_CONFIGURATION'] == 'Development': import dotenv dotenv.read_dotenv('.env') from configurations.management import execute_from_command_line execute_from_command_line(sys.argv) # ... rest of the code ...
091b543fd8668d6f53bf126492aaaf47251d0672
src/ggrc_basic_permissions/roles/ProgramEditor.py
src/ggrc_basic_permissions/roles/ProgramEditor.py
scope = "Private Program" description = """ A user with authorization to edit mapping objects related to an access controlled program.<br/><br/>When a person has this role they can map and unmap objects to the Program and edit the Program info, but they are unable to delete the Program or assign other people roles for that program. """ permissions = { "read": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship", "UserRole", "Context", ], "create": [ "Audit", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ], "view_object_page": [ "__GGRC_ALL__" ], "update": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship" ], "delete": [ "Program", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ] }
scope = "Private Program" description = """ A user with authorization to edit mapping objects related to an access controlled program.<br/><br/>When a person has this role they can map and unmap objects to the Program and edit the Program info, but they are unable to delete the Program or assign other people roles for that program. """ permissions = { "read": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship", "UserRole", "Context", ], "create": [ "Audit", "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ], "view_object_page": [ "__GGRC_ALL__" ], "update": [ "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship" ], "delete": [ "Program", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ] }
Add support for program editor to create and update snapshots
Add support for program editor to create and update snapshots
Python
apache-2.0
selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core
python
## Code Before: scope = "Private Program" description = """ A user with authorization to edit mapping objects related to an access controlled program.<br/><br/>When a person has this role they can map and unmap objects to the Program and edit the Program info, but they are unable to delete the Program or assign other people roles for that program. """ permissions = { "read": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship", "UserRole", "Context", ], "create": [ "Audit", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ], "view_object_page": [ "__GGRC_ALL__" ], "update": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship" ], "delete": [ "Program", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ] } ## Instruction: Add support for program editor to create and update snapshots ## Code After: scope = "Private Program" description = """ A user with authorization to edit mapping objects related to an access controlled program.<br/><br/>When a person has this role they can map and unmap objects to the Program and edit the Program info, but they are unable to delete the Program or assign other people roles for that program. """ permissions = { "read": [ "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship", "UserRole", "Context", ], "create": [ "Audit", "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ], "view_object_page": [ "__GGRC_ALL__" ], "update": [ "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Program", "Relationship" ], "delete": [ "Program", "ObjectDocument", "ObjectObjective", "ObjectPerson", "Relationship", ] }
# ... existing code ... ], "create": [ "Audit", "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", # ... modified code ... "__GGRC_ALL__" ], "update": [ "Snapshot", "ObjectDocument", "ObjectObjective", "ObjectPerson", # ... rest of the code ...
70808a2243ebf04aa86d5b4539950b22cd96cc7d
maras/utils/__init__.py
maras/utils/__init__.py
''' Misc utilities ''' # Import python libs import os import binascii def rand_hex_str(size): ''' Return a random string of the passed size using hex encoding ''' return binascii.hexlify(os.urandom(size/2)) def rand_raw_str(size): ''' Return a raw byte string of the given size ''' return os.urandom(size)
''' Misc utilities ''' # Import python libs import os import time import struct import binascii import datetime # create a standard epoch so all platforms will count revs from # a standard epoch of jan 1 2014 STD_EPOCH = time.mktime(datetime.datetime(2014, 1, 1).timetuple()) def rand_hex_str(size): ''' Return a random string of the passed size using hex encoding ''' return binascii.hexlify(os.urandom(size/2)) def rand_raw_str(size): ''' Return a raw byte string of the given size ''' return os.urandom(size) def gen_rev(): ''' Return a revision based on timestamp ''' r_time = time.time() - STD_EPOCH return struct.pack('>Q', r_time * 1000000)
Add rev generation via normalized timestamps
Add rev generation via normalized timestamps
Python
apache-2.0
thatch45/maras
python
## Code Before: ''' Misc utilities ''' # Import python libs import os import binascii def rand_hex_str(size): ''' Return a random string of the passed size using hex encoding ''' return binascii.hexlify(os.urandom(size/2)) def rand_raw_str(size): ''' Return a raw byte string of the given size ''' return os.urandom(size) ## Instruction: Add rev generation via normalized timestamps ## Code After: ''' Misc utilities ''' # Import python libs import os import time import struct import binascii import datetime # create a standard epoch so all platforms will count revs from # a standard epoch of jan 1 2014 STD_EPOCH = time.mktime(datetime.datetime(2014, 1, 1).timetuple()) def rand_hex_str(size): ''' Return a random string of the passed size using hex encoding ''' return binascii.hexlify(os.urandom(size/2)) def rand_raw_str(size): ''' Return a raw byte string of the given size ''' return os.urandom(size) def gen_rev(): ''' Return a revision based on timestamp ''' r_time = time.time() - STD_EPOCH return struct.pack('>Q', r_time * 1000000)
... # Import python libs import os import time import struct import binascii import datetime # create a standard epoch so all platforms will count revs from # a standard epoch of jan 1 2014 STD_EPOCH = time.mktime(datetime.datetime(2014, 1, 1).timetuple()) def rand_hex_str(size): ... Return a raw byte string of the given size ''' return os.urandom(size) def gen_rev(): ''' Return a revision based on timestamp ''' r_time = time.time() - STD_EPOCH return struct.pack('>Q', r_time * 1000000) ...
889c1530c9b25a784447cd57659aecd3e5434942
src/include/executor/execdefs.h
src/include/executor/execdefs.h
/*------------------------------------------------------------------------- * * execdefs.h * * * * Copyright (c) 1994, Regents of the University of California * * $Id: execdefs.h,v 1.4 1999/02/13 23:21:22 momjian Exp $ * *------------------------------------------------------------------------- */ #ifndef EXECDEFS_H #define EXECDEFS_H /* ---------------- * ExecutePlan() tuplecount definitions * ---------------- */ #define ALL_TUPLES 0 /* return all tuples */ #define ONE_TUPLE 1 /* return only one tuple */ /* ---------------- * constants used by ExecMain * ---------------- */ #define EXEC_RUN 3 #define EXEC_FOR 4 #define EXEC_BACK 5 #define EXEC_RETONE 6 #define EXEC_RESULT 7 /* ---------------- * Merge Join states * ---------------- */ #define EXEC_MJ_INITIALIZE 1 #define EXEC_MJ_JOINMARK 2 #define EXEC_MJ_JOINTEST 3 #define EXEC_MJ_JOINTUPLES 4 #define EXEC_MJ_NEXTOUTER 5 #define EXEC_MJ_TESTOUTER 6 #define EXEC_MJ_NEXTINNER 7 #define EXEC_MJ_SKIPINNER 8 #define EXEC_MJ_SKIPOUTER 9 #endif /* EXECDEFS_H */
/*------------------------------------------------------------------------- * * execdefs.h * * * * Copyright (c) 1994, Regents of the University of California * * $Id: execdefs.h,v 1.5 1999/02/23 07:37:31 thomas Exp $ * *------------------------------------------------------------------------- */ #ifndef EXECDEFS_H #define EXECDEFS_H /* ---------------- * ExecutePlan() tuplecount definitions * ---------------- */ #define ALL_TUPLES 0 /* return all tuples */ #define ONE_TUPLE 1 /* return only one tuple */ /* ---------------- * constants used by ExecMain * ---------------- */ #define EXEC_RUN 3 #define EXEC_FOR 4 #define EXEC_BACK 5 #define EXEC_RETONE 6 #define EXEC_RESULT 7 /* ---------------- * Merge Join states * ---------------- */ #define EXEC_MJ_INITIALIZE 1 #define EXEC_MJ_JOINMARK 2 #define EXEC_MJ_JOINTEST 3 #define EXEC_MJ_JOINTUPLES 4 #define EXEC_MJ_NEXTOUTER 5 #define EXEC_MJ_TESTOUTER 6 #define EXEC_MJ_NEXTINNER 7 #define EXEC_MJ_SKIPINNER 8 #define EXEC_MJ_SKIPOUTER 9 #define EXEC_MJ_FILLINNER 10 #define EXEC_MJ_FILLOUTER 11 #endif /* EXECDEFS_H */
Add constants for outer join states in executor.
Add constants for outer join states in executor.
C
apache-2.0
Quikling/gpdb,greenplum-db/gpdb,pavanvd/postgres-xl,kmjungersen/PostgresXL,royc1/gpdb,Postgres-XL/Postgres-XL,adam8157/gpdb,ovr/postgres-xl,chrishajas/gpdb,rubikloud/gpdb,Chibin/gpdb,Quikling/gpdb,rvs/gpdb,oberstet/postgres-xl,tpostgres-projects/tPostgres,lintzc/gpdb,Chibin/gpdb,lisakowen/gpdb,ahachete/gpdb,0x0FFF/gpdb,lisakowen/gpdb,snaga/postgres-xl,rvs/gpdb,lisakowen/gpdb,Chibin/gpdb,cjcjameson/gpdb,lisakowen/gpdb,ashwinstar/gpdb,zaksoup/gpdb,50wu/gpdb,lpetrov-pivotal/gpdb,yuanzhao/gpdb,ashwinstar/gpdb,ahachete/gpdb,cjcjameson/gpdb,cjcjameson/gpdb,cjcjameson/gpdb,lpetrov-pivotal/gpdb,Chibin/gpdb,kaknikhil/gpdb,atris/gpdb,techdragon/Postgres-XL,yazun/postgres-xl,lpetrov-pivotal/gpdb,Postgres-XL/Postgres-XL,lintzc/gpdb,kaknikhil/gpdb,xuegang/gpdb,royc1/gpdb,royc1/gpdb,xuegang/gpdb,Postgres-XL/Postgres-XL,adam8157/gpdb,zaksoup/gpdb,arcivanov/postgres-xl,kaknikhil/gpdb,zaksoup/gpdb,tangp3/gpdb,yazun/postgres-xl,50wu/gpdb,xuegang/gpdb,chrishajas/gpdb,ashwinstar/gpdb,chrishajas/gpdb,xinzweb/gpdb,arcivanov/postgres-xl,ashwinstar/gpdb,ovr/postgres-xl,foyzur/gpdb,ahachete/gpdb,foyzur/gpdb,kaknikhil/gpdb,pavanvd/postgres-xl,xinzweb/gpdb,rvs/gpdb,chrishajas/gpdb,zaksoup/gpdb,lisakowen/gpdb,rvs/gpdb,kaknikhil/gpdb,arcivanov/postgres-xl,arcivanov/postgres-xl,tangp3/gpdb,greenplum-db/gpdb,oberstet/postgres-xl,zaksoup/gpdb,greenplum-db/gpdb,xinzweb/gpdb,0x0FFF/gpdb,snaga/postgres-xl,jmcatamney/gpdb,rvs/gpdb,jmcatamney/gpdb,postmind-net/postgres-xl,zeroae/postgres-xl,techdragon/Postgres-XL,cjcjameson/gpdb,kaknikhil/gpdb,foyzur/gpdb,edespino/gpdb,janebeckman/gpdb,rubikloud/gpdb,royc1/gpdb,randomtask1155/gpdb,CraigHarris/gpdb,edespino/gpdb,cjcjameson/gpdb,tpostgres-projects/tPostgres,jmcatamney/gpdb,lisakowen/gpdb,CraigHarris/gpdb,techdragon/Postgres-XL,zeroae/postgres-xl,kaknikhil/gpdb,xuegang/gpdb,zaksoup/gpdb,lpetrov-pivotal/gpdb,Chibin/gpdb,janebeckman/gpdb,kmjungersen/PostgresXL,xinzweb/gpdb,xuegang/gpdb,lintzc/gpdb,kaknikhil/gpdb,Quikling/gpdb,rubikloud/gpdb,yuanzhao/gpdb,ashwinstar/gpdb,royc1/gpdb,CraigHarris/gpdb,foyzur/gpdb,ahachete/gpdb,foyzur/gpdb,Quikling/gpdb,yazun/postgres-xl,postmind-net/postgres-xl,royc1/gpdb,adam8157/gpdb,yuanzhao/gpdb,lintzc/gpdb,kmjungersen/PostgresXL,ovr/postgres-xl,atris/gpdb,foyzur/gpdb,tangp3/gpdb,50wu/gpdb,randomtask1155/gpdb,jmcatamney/gpdb,snaga/postgres-xl,rvs/gpdb,oberstet/postgres-xl,Quikling/gpdb,royc1/gpdb,xuegang/gpdb,cjcjameson/gpdb,zaksoup/gpdb,rvs/gpdb,lpetrov-pivotal/gpdb,janebeckman/gpdb,edespino/gpdb,CraigHarris/gpdb,tangp3/gpdb,xinzweb/gpdb,techdragon/Postgres-XL,rubikloud/gpdb,rvs/gpdb,Quikling/gpdb,lintzc/gpdb,yazun/postgres-xl,lpetrov-pivotal/gpdb,edespino/gpdb,Postgres-XL/Postgres-XL,Postgres-XL/Postgres-XL,xuegang/gpdb,jmcatamney/gpdb,50wu/gpdb,50wu/gpdb,CraigHarris/gpdb,lintzc/gpdb,zaksoup/gpdb,randomtask1155/gpdb,atris/gpdb,adam8157/gpdb,royc1/gpdb,chrishajas/gpdb,janebeckman/gpdb,edespino/gpdb,yuanzhao/gpdb,xinzweb/gpdb,50wu/gpdb,rvs/gpdb,CraigHarris/gpdb,greenplum-db/gpdb,Quikling/gpdb,lpetrov-pivotal/gpdb,randomtask1155/gpdb,adam8157/gpdb,kmjungersen/PostgresXL,edespino/gpdb,zeroae/postgres-xl,edespino/gpdb,lisakowen/gpdb,Quikling/gpdb,tpostgres-projects/tPostgres,jmcatamney/gpdb,tangp3/gpdb,pavanvd/postgres-xl,ovr/postgres-xl,0x0FFF/gpdb,yuanzhao/gpdb,yuanzhao/gpdb,cjcjameson/gpdb,oberstet/postgres-xl,ashwinstar/gpdb,postmind-net/postgres-xl,lpetrov-pivotal/gpdb,lisakowen/gpdb,lintzc/gpdb,randomtask1155/gpdb,yuanzhao/gpdb,50wu/gpdb,arcivanov/postgres-xl,xinzweb/gpdb,yuanzhao/gpdb,janebeckman/gpdb,Chibin/gpdb,Quikling/gpdb,zeroae/postgres-x
l,atris/gpdb,rubikloud/gpdb,Quikling/gpdb,rubikloud/gpdb,snaga/postgres-xl,kmjungersen/PostgresXL,randomtask1155/gpdb,janebeckman/gpdb,50wu/gpdb,ahachete/gpdb,yazun/postgres-xl,techdragon/Postgres-XL,Chibin/gpdb,yuanzhao/gpdb,janebeckman/gpdb,adam8157/gpdb,lintzc/gpdb,chrishajas/gpdb,lintzc/gpdb,tangp3/gpdb,janebeckman/gpdb,randomtask1155/gpdb,atris/gpdb,jmcatamney/gpdb,janebeckman/gpdb,ashwinstar/gpdb,greenplum-db/gpdb,tangp3/gpdb,arcivanov/postgres-xl,pavanvd/postgres-xl,rubikloud/gpdb,Chibin/gpdb,foyzur/gpdb,0x0FFF/gpdb,pavanvd/postgres-xl,randomtask1155/gpdb,0x0FFF/gpdb,CraigHarris/gpdb,edespino/gpdb,kaknikhil/gpdb,janebeckman/gpdb,ashwinstar/gpdb,edespino/gpdb,foyzur/gpdb,0x0FFF/gpdb,xinzweb/gpdb,Chibin/gpdb,oberstet/postgres-xl,ovr/postgres-xl,CraigHarris/gpdb,tpostgres-projects/tPostgres,ahachete/gpdb,rvs/gpdb,adam8157/gpdb,cjcjameson/gpdb,kaknikhil/gpdb,adam8157/gpdb,cjcjameson/gpdb,0x0FFF/gpdb,xuegang/gpdb,atris/gpdb,atris/gpdb,atris/gpdb,0x0FFF/gpdb,chrishajas/gpdb,jmcatamney/gpdb,ahachete/gpdb,chrishajas/gpdb,tpostgres-projects/tPostgres,ahachete/gpdb,CraigHarris/gpdb,rubikloud/gpdb,xuegang/gpdb,postmind-net/postgres-xl,tangp3/gpdb,greenplum-db/gpdb,Chibin/gpdb,greenplum-db/gpdb,yuanzhao/gpdb,edespino/gpdb,snaga/postgres-xl,greenplum-db/gpdb,postmind-net/postgres-xl,zeroae/postgres-xl
c
## Code Before: /*------------------------------------------------------------------------- * * execdefs.h * * * * Copyright (c) 1994, Regents of the University of California * * $Id: execdefs.h,v 1.4 1999/02/13 23:21:22 momjian Exp $ * *------------------------------------------------------------------------- */ #ifndef EXECDEFS_H #define EXECDEFS_H /* ---------------- * ExecutePlan() tuplecount definitions * ---------------- */ #define ALL_TUPLES 0 /* return all tuples */ #define ONE_TUPLE 1 /* return only one tuple */ /* ---------------- * constants used by ExecMain * ---------------- */ #define EXEC_RUN 3 #define EXEC_FOR 4 #define EXEC_BACK 5 #define EXEC_RETONE 6 #define EXEC_RESULT 7 /* ---------------- * Merge Join states * ---------------- */ #define EXEC_MJ_INITIALIZE 1 #define EXEC_MJ_JOINMARK 2 #define EXEC_MJ_JOINTEST 3 #define EXEC_MJ_JOINTUPLES 4 #define EXEC_MJ_NEXTOUTER 5 #define EXEC_MJ_TESTOUTER 6 #define EXEC_MJ_NEXTINNER 7 #define EXEC_MJ_SKIPINNER 8 #define EXEC_MJ_SKIPOUTER 9 #endif /* EXECDEFS_H */ ## Instruction: Add constants for outer join states in executor. ## Code After: /*------------------------------------------------------------------------- * * execdefs.h * * * * Copyright (c) 1994, Regents of the University of California * * $Id: execdefs.h,v 1.5 1999/02/23 07:37:31 thomas Exp $ * *------------------------------------------------------------------------- */ #ifndef EXECDEFS_H #define EXECDEFS_H /* ---------------- * ExecutePlan() tuplecount definitions * ---------------- */ #define ALL_TUPLES 0 /* return all tuples */ #define ONE_TUPLE 1 /* return only one tuple */ /* ---------------- * constants used by ExecMain * ---------------- */ #define EXEC_RUN 3 #define EXEC_FOR 4 #define EXEC_BACK 5 #define EXEC_RETONE 6 #define EXEC_RESULT 7 /* ---------------- * Merge Join states * ---------------- */ #define EXEC_MJ_INITIALIZE 1 #define EXEC_MJ_JOINMARK 2 #define EXEC_MJ_JOINTEST 3 #define EXEC_MJ_JOINTUPLES 4 #define EXEC_MJ_NEXTOUTER 5 #define EXEC_MJ_TESTOUTER 6 #define EXEC_MJ_NEXTINNER 7 #define EXEC_MJ_SKIPINNER 8 #define EXEC_MJ_SKIPOUTER 9 #define EXEC_MJ_FILLINNER 10 #define EXEC_MJ_FILLOUTER 11 #endif /* EXECDEFS_H */
... * * Copyright (c) 1994, Regents of the University of California * * $Id: execdefs.h,v 1.5 1999/02/23 07:37:31 thomas Exp $ * *------------------------------------------------------------------------- */ ... #define EXEC_MJ_NEXTINNER 7 #define EXEC_MJ_SKIPINNER 8 #define EXEC_MJ_SKIPOUTER 9 #define EXEC_MJ_FILLINNER 10 #define EXEC_MJ_FILLOUTER 11 #endif /* EXECDEFS_H */ ...
7b9b144ce8e7fca38500f5f0c4e2f5ec3b5d9e0f
tests/px_rambar_test.py
tests/px_rambar_test.py
import os import sys from px import px_rambar from px import px_terminal def test_render_bar_happy_path(): names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [ (u"long tail", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" apa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") )
import os import sys from px import px_rambar from px import px_terminal def test_render_bar_happy_path(): names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [ (u"long tail", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" apa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") ) def test_render_bar_happy_path_unicode(): names_and_numbers = [(u"åpa", 1000), (u"bäpa", 300), (u"cäpa", 50)] + [ (u"lång svans", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" åpa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") )
Verify rambar can do unicode
Verify rambar can do unicode
Python
mit
walles/px,walles/px
python
## Code Before: import os import sys from px import px_rambar from px import px_terminal def test_render_bar_happy_path(): names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [ (u"long tail", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" apa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") ) ## Instruction: Verify rambar can do unicode ## Code After: import os import sys from px import px_rambar from px import px_terminal def test_render_bar_happy_path(): names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [ (u"long tail", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" apa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") ) def test_render_bar_happy_path_unicode(): names_and_numbers = [(u"åpa", 1000), (u"bäpa", 300), (u"cäpa", 50)] + [ (u"lång svans", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" åpa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") )
# ... existing code ... import os import sys # ... modified code ... + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") ) def test_render_bar_happy_path_unicode(): names_and_numbers = [(u"åpa", 1000), (u"bäpa", 300), (u"cäpa", 50)] + [ (u"lång svans", 1) ] * 300 assert px_rambar.render_bar(10, names_and_numbers) == ( px_terminal.red(u" åpa ") + px_terminal.yellow(u" b") + px_terminal.blue(u" ") + px_terminal.inverse_video(u" ") ) # ... rest of the code ...
73a9889f0e43d2b1dc94e2235a94cb888e0eda89
zeus/utils/sentry.py
zeus/utils/sentry.py
from functools import wraps from sentry_sdk import Hub def span(op, desc_or_func=None): def inner(func): @wraps(func) def wrapped(*args, **kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): return func(*args, **kwargs) return wrapped return inner
import asyncio from contextlib import contextmanager from functools import wraps from sentry_sdk import Hub # https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function def span(op, desc_or_func=None): def inner(func): @contextmanager def wrap_with_span(args, kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): yield @wraps(func) def wrapper(*args, **kwargs): if not asyncio.iscoroutinefunction(func): with wrap_with_span(args, kwargs): return func(*args, **kwargs) else: async def tmp(): with wrap_with_span(args, kwargs): return await func(*args, **kwargs) return tmp() return wrapper return inner
Fix span decorator to work with asyncio
Fix span decorator to work with asyncio
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
python
## Code Before: from functools import wraps from sentry_sdk import Hub def span(op, desc_or_func=None): def inner(func): @wraps(func) def wrapped(*args, **kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): return func(*args, **kwargs) return wrapped return inner ## Instruction: Fix span decorator to work with asyncio ## Code After: import asyncio from contextlib import contextmanager from functools import wraps from sentry_sdk import Hub # https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function def span(op, desc_or_func=None): def inner(func): @contextmanager def wrap_with_span(args, kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): yield @wraps(func) def wrapper(*args, **kwargs): if not asyncio.iscoroutinefunction(func): with wrap_with_span(args, kwargs): return func(*args, **kwargs) else: async def tmp(): with wrap_with_span(args, kwargs): return await func(*args, **kwargs) return tmp() return wrapper return inner
... import asyncio from contextlib import contextmanager from functools import wraps from sentry_sdk import Hub # https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function def span(op, desc_or_func=None): def inner(func): @contextmanager def wrap_with_span(args, kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): yield @wraps(func) def wrapper(*args, **kwargs): if not asyncio.iscoroutinefunction(func): with wrap_with_span(args, kwargs): return func(*args, **kwargs) else: async def tmp(): with wrap_with_span(args, kwargs): return await func(*args, **kwargs) return tmp() return wrapper return inner ...
62f1dc7283d74ae66a5c1132df2a1c10252ddfa7
src/com/samsarin/gatu/primitive/AbstractChromosome.java
src/com/samsarin/gatu/primitive/AbstractChromosome.java
/* * Copyright (c) 2008 Chris Pettitt */ package com.samsarin.gatu.primitive; import java.util.BitSet; /** * @author [email protected] */ /* package private */ abstract class AbstractChromosome implements Chromosome { /** * Returns the chromosome as a {@link BitSet}. * @return the BitSet view of this chromosome */ abstract BitSet bitSetValue(); public boolean get(int index) { checkBounds(index); return bitSetValue().get(index); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (!(obj instanceof AbstractChromosome)) return false; return bitSetValue().equals(((AbstractChromosome)obj).bitSetValue()); } @Override public int hashCode() { return bitSetValue().hashCode(); } @Override public String toString() { return bitSetValue().toString(); } private void checkBounds(int index) { if (index < 0 || index >= length()) { throw new IndexOutOfBoundsException("Index out of bounds. Expected 0 <= index < " + length() + ". Actual: " + index); } } }
/* * Copyright (c) 2008 Chris Pettitt */ package com.samsarin.gatu.primitive; import java.util.BitSet; /** * @author [email protected] */ /* package private */ abstract class AbstractChromosome implements Chromosome { /** * Returns the chromosome as a {@link BitSet}. * @return the BitSet view of this chromosome */ abstract BitSet bitSetValue(); public boolean get(int index) { checkBounds(index); return bitSetValue().get(index); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (!(obj instanceof AbstractChromosome)) return false; return bitSetValue().equals(((AbstractChromosome)obj).bitSetValue()); } @Override public int hashCode() { return bitSetValue().hashCode(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Chromosome["); for (int i = 0; i < length(); ++i) { sb.append(get(i) ? '1' : '0'); } sb.append("]"); return sb.toString(); } private void checkBounds(int index) { if (index < 0 || index >= length()) { throw new IndexOutOfBoundsException("Index out of bounds. Expected 0 <= index < " + length() + ". Actual: " + index); } } }
Change Chromosome toString() to show actual bit string
Change Chromosome toString() to show actual bit string
Java
mit
cpettitt/gatu
java
## Code Before: /* * Copyright (c) 2008 Chris Pettitt */ package com.samsarin.gatu.primitive; import java.util.BitSet; /** * @author [email protected] */ /* package private */ abstract class AbstractChromosome implements Chromosome { /** * Returns the chromosome as a {@link BitSet}. * @return the BitSet view of this chromosome */ abstract BitSet bitSetValue(); public boolean get(int index) { checkBounds(index); return bitSetValue().get(index); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (!(obj instanceof AbstractChromosome)) return false; return bitSetValue().equals(((AbstractChromosome)obj).bitSetValue()); } @Override public int hashCode() { return bitSetValue().hashCode(); } @Override public String toString() { return bitSetValue().toString(); } private void checkBounds(int index) { if (index < 0 || index >= length()) { throw new IndexOutOfBoundsException("Index out of bounds. Expected 0 <= index < " + length() + ". Actual: " + index); } } } ## Instruction: Change Chromosome toString() to show actual bit string ## Code After: /* * Copyright (c) 2008 Chris Pettitt */ package com.samsarin.gatu.primitive; import java.util.BitSet; /** * @author [email protected] */ /* package private */ abstract class AbstractChromosome implements Chromosome { /** * Returns the chromosome as a {@link BitSet}. * @return the BitSet view of this chromosome */ abstract BitSet bitSetValue(); public boolean get(int index) { checkBounds(index); return bitSetValue().get(index); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (!(obj instanceof AbstractChromosome)) return false; return bitSetValue().equals(((AbstractChromosome)obj).bitSetValue()); } @Override public int hashCode() { return bitSetValue().hashCode(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Chromosome["); for (int i = 0; i < length(); ++i) { sb.append(get(i) ? '1' : '0'); } sb.append("]"); return sb.toString(); } private void checkBounds(int index) { if (index < 0 || index >= length()) { throw new IndexOutOfBoundsException("Index out of bounds. Expected 0 <= index < " + length() + ". Actual: " + index); } } }
# ... existing code ... @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Chromosome["); for (int i = 0; i < length(); ++i) { sb.append(get(i) ? '1' : '0'); } sb.append("]"); return sb.toString(); } private void checkBounds(int index) { # ... rest of the code ...
5e6d52277e34c254bad6b386cf05f490baf6a6f2
webapp-django/accounts/models.py
webapp-django/accounts/models.py
from django.contrib.auth.models import User from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver class UserProfile(models.Model): user = models.OneToOneField(User) bio = models.TextField(max_length=256, blank=True) solvedChallenges=models.CharField(solved=[],max_length=256) solvedQuestions=models.CharField(solved=[],max_length=256) score = models.IntegerField(default=0) def __str__(self): return str(self.user.username) # Method to link the User and UserProfile models @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) instance.userprofile.save()
from django.contrib.auth.models import User from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver from challenges.models import Challenge from questionnaire.models import Question class UserProfile(models.Model): user = models.OneToOneField(User) bio = models.TextField(max_length=256, blank=True) solved_challenges = models.ManyToManyField(Challenge) solved_questions = models.ManyToManyField(Question) score = models.IntegerField(default=0, editable=False) def __str__(self): return str(self.user.username) def calculate_score(self): score = 0 for chal in self.solved_challenges.all(): score = score + chal.score for ques in self.solved_questions.all(): score = score + ques.score return score def save(self, *args, **kwargs): '''On save, update score ''' self.score = self.calculate_score() return super(UserProfile, self).save(*args, **kwargs) # Method to link the User and UserProfile models @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) instance.userprofile.save()
Update accounts model with scoring system
Update accounts model with scoring system
Python
mit
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
python
## Code Before: from django.contrib.auth.models import User from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver class UserProfile(models.Model): user = models.OneToOneField(User) bio = models.TextField(max_length=256, blank=True) solvedChallenges=models.CharField(solved=[],max_length=256) solvedQuestions=models.CharField(solved=[],max_length=256) score = models.IntegerField(default=0) def __str__(self): return str(self.user.username) # Method to link the User and UserProfile models @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) instance.userprofile.save() ## Instruction: Update accounts model with scoring system ## Code After: from django.contrib.auth.models import User from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver from challenges.models import Challenge from questionnaire.models import Question class UserProfile(models.Model): user = models.OneToOneField(User) bio = models.TextField(max_length=256, blank=True) solved_challenges = models.ManyToManyField(Challenge) solved_questions = models.ManyToManyField(Question) score = models.IntegerField(default=0, editable=False) def __str__(self): return str(self.user.username) def calculate_score(self): score = 0 for chal in self.solved_challenges.all(): score = score + chal.score for ques in self.solved_questions.all(): score = score + ques.score return score def save(self, *args, **kwargs): '''On save, update score ''' self.score = self.calculate_score() return super(UserProfile, self).save(*args, **kwargs) # Method to link the User and UserProfile models @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) instance.userprofile.save()
// ... existing code ... from django.db.models.signals import post_save from django.dispatch import receiver from challenges.models import Challenge from questionnaire.models import Question class UserProfile(models.Model): user = models.OneToOneField(User) bio = models.TextField(max_length=256, blank=True) solved_challenges = models.ManyToManyField(Challenge) solved_questions = models.ManyToManyField(Question) score = models.IntegerField(default=0, editable=False) def __str__(self): return str(self.user.username) def calculate_score(self): score = 0 for chal in self.solved_challenges.all(): score = score + chal.score for ques in self.solved_questions.all(): score = score + ques.score return score def save(self, *args, **kwargs): '''On save, update score ''' self.score = self.calculate_score() return super(UserProfile, self).save(*args, **kwargs) # Method to link the User and UserProfile models // ... rest of the code ...
6bbc09533e28e0912ac2b1683e3d03cb26ce9fa0
ui/aura/aura_switches.h
ui/aura/aura_switches.h
// Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_AURA_AURA_SWITCHES_H_ #define UI_AURA_AURA_SWITCHES_H_ #pragma once namespace switches { extern const char kAuraHostWindowSize[]; extern const char kAuraWindows[]; } // namespace switches #endif // UI_AURA_AURA_SWITCHES_H_
// Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_AURA_AURA_SWITCHES_H_ #define UI_AURA_AURA_SWITCHES_H_ #pragma once #include "ui/aura/aura_export.h" namespace switches { AURA_EXPORT extern const char kAuraHostWindowSize[]; AURA_EXPORT extern const char kAuraWindows[]; } // namespace switches #endif // UI_AURA_AURA_SWITCHES_H_
Fix shared library build for aura.
Fix shared library build for aura. [email protected],[email protected] [email protected],[email protected] BUG=none TEST=none Review URL: http://codereview.chromium.org/8438039 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@108299 0039d316-1c4b-4281-b951-d872f2087c98
C
bsd-3-clause
PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,zcbenz/cefode-chromium,keishi/chromium,Chilledheart/chromium,krieger-od/nwjs_chromium.src,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Chilledheart/chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,markYoungH/chromium.src,Fireblend/chromium-crosswalk,keishi/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,ChromiumWebApps/chromium,jaruba/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Chilledheart/chromium,anirudhSK/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,rogerwang/chromium,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,ltilve/chromium,markYoungH/chromium.src,keishi/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,ltilve/chromium,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,robclark/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,keishi/chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,rogerwang/chromium,ChromiumWebApps/chromium,rogerwang/chromium,M4sse/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,Just-D/chromium-1,Chilledheart/chromium,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,keishi/chromium,robclark/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,hgl888/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,nacl-webkit/chrome_deps,dednal/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,keishi/chromium,keishi/chromium,mohamed--abdel-maksoud/chromium.src,junmin-zhu/chromium-rivertrail,bright-sparks/chromium-spacewalk,robclark/chromium,keishi/chromium,crosswa
lk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,littlstar/chromium.src,hujiajie/pa-chromium,jaruba/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,dednal/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,keishi/chromium,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,rogerwang/chromium,Chilledheart/chromium,ChromiumWebApps/chromium,ChromiumWebApps/chromium,Jonekee/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,anirudhSK/chromium,rogerwang/chromium,Jonekee/chromium.src,anirudhSK/chromium,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,robclark/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,keishi/chromium,markYoungH/chromium.src,robclark/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,littlstar/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,zcbenz/cefode-chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,axinging/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,hujiajie/pa-chromium,dednal/chromium.src,littlstar/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,M4sse/chromium.src,robclark/chromium,ChromiumWebApps/chromium,anirudhSK/chromium,ondra-novak/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hujiajie/pa-chromium,nacl-webkit/chrome_deps,rogerwang/chromium,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,robclark/chromium,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,littlstar/chromium.src,Chilledheart/chromium,timopulkkinen/BubbleFish,patrickm/chromium.src,robclark
/chromium,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,jaruba/chromium.src,markYoungH/chromium.src,ondra-novak/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,ondra-novak/chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,Just-D/chromium-1,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,robclark/chromium,dushu1203/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,dednal/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,rogerwang/chromium,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,patrickm/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,keishi/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,patrickm/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,robclark/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,anirudhSK/chromium
c
## Code Before: // Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_AURA_AURA_SWITCHES_H_ #define UI_AURA_AURA_SWITCHES_H_ #pragma once namespace switches { extern const char kAuraHostWindowSize[]; extern const char kAuraWindows[]; } // namespace switches #endif // UI_AURA_AURA_SWITCHES_H_ ## Instruction: Fix shared library build for aura. [email protected],[email protected] [email protected],[email protected] BUG=none TEST=none Review URL: http://codereview.chromium.org/8438039 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@108299 0039d316-1c4b-4281-b951-d872f2087c98 ## Code After: // Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_AURA_AURA_SWITCHES_H_ #define UI_AURA_AURA_SWITCHES_H_ #pragma once #include "ui/aura/aura_export.h" namespace switches { AURA_EXPORT extern const char kAuraHostWindowSize[]; AURA_EXPORT extern const char kAuraWindows[]; } // namespace switches #endif // UI_AURA_AURA_SWITCHES_H_
// ... existing code ... #define UI_AURA_AURA_SWITCHES_H_ #pragma once #include "ui/aura/aura_export.h" namespace switches { AURA_EXPORT extern const char kAuraHostWindowSize[]; AURA_EXPORT extern const char kAuraWindows[]; } // namespace switches // ... rest of the code ...
a2dc0f3ef55f63c3bb44121d357da670fea0ba0b
net.stickycode.bootstrap/sticky-bootstrap-spring3/src/main/java/net/stickycode/bootstrap/spring3/StickySpringBootstrap.java
net.stickycode.bootstrap/sticky-bootstrap-spring3/src/main/java/net/stickycode/bootstrap/spring3/StickySpringBootstrap.java
package net.stickycode.bootstrap.spring3; import net.stickycode.stereotype.StickyComponent; import net.stickycode.stereotype.StickyPlugin; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.type.filter.AnnotationTypeFilter; public class StickySpringBootstrap { private GenericApplicationContext context; public StickySpringBootstrap(GenericApplicationContext context) { this.context = context; } public void scan(String... paths) { ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(context, false); scanner.setScopeMetadataResolver(new StickyScopeMetadataResolver()); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyComponent.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyPlugin.class)); scanner.scan(paths); } }
package net.stickycode.bootstrap.spring3; import net.stickycode.stereotype.StickyComponent; import net.stickycode.stereotype.StickyPlugin; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.stereotype.Component; public class StickySpringBootstrap { private GenericApplicationContext context; public StickySpringBootstrap(GenericApplicationContext context) { this.context = context; } public void scan(String... paths) { ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(context, false); scanner.setScopeMetadataResolver(new StickyScopeMetadataResolver()); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyComponent.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyPlugin.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(Component.class)); scanner.scan(paths); } }
Use the spring component as well to allow for migrations
Use the spring component as well to allow for migrations
Java
apache-2.0
walterDurin/stickycode,tectronics/stickycode,walterDurin/stickycode,tectronics/stickycode,walterDurin/stickycode,tectronics/stickycode
java
## Code Before: package net.stickycode.bootstrap.spring3; import net.stickycode.stereotype.StickyComponent; import net.stickycode.stereotype.StickyPlugin; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.type.filter.AnnotationTypeFilter; public class StickySpringBootstrap { private GenericApplicationContext context; public StickySpringBootstrap(GenericApplicationContext context) { this.context = context; } public void scan(String... paths) { ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(context, false); scanner.setScopeMetadataResolver(new StickyScopeMetadataResolver()); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyComponent.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyPlugin.class)); scanner.scan(paths); } } ## Instruction: Use the spring component as well to allow for migrations ## Code After: package net.stickycode.bootstrap.spring3; import net.stickycode.stereotype.StickyComponent; import net.stickycode.stereotype.StickyPlugin; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.stereotype.Component; public class StickySpringBootstrap { private GenericApplicationContext context; public StickySpringBootstrap(GenericApplicationContext context) { this.context = context; } public void scan(String... paths) { ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(context, false); scanner.setScopeMetadataResolver(new StickyScopeMetadataResolver()); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyComponent.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyPlugin.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(Component.class)); scanner.scan(paths); } }
// ... existing code ... import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.stereotype.Component; public class StickySpringBootstrap { // ... modified code ... scanner.setScopeMetadataResolver(new StickyScopeMetadataResolver()); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyComponent.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(StickyPlugin.class)); scanner.addIncludeFilter(new AnnotationTypeFilter(Component.class)); scanner.scan(paths); } // ... rest of the code ...
99899f753ff9697f926389efe688c1ae2088c4c4
kpi/management/commands/wait_for_database.py
kpi/management/commands/wait_for_database.py
import time from django.core.management.base import BaseCommand, CommandError from django.db import connection from django.db.utils import OperationalError class Command(BaseCommand): help = ( 'Repeatedly attempt to connect to the default database, exiting ' 'silently once the connection succeeds, or with an error if a ' 'connection cannot be established' ) def add_arguments(self, parser): parser.add_argument( '--retries', default=5, type=int, help=( 'Try this many times before giving up, waiting 2 seconds ' 'between each attempt' ), ) def handle(self, *args, **options): for _ in range(options.get('retries')): try: with connection.cursor() as cursor: return except OperationalError as e: if str(e).strip().endswith('does not exist'): # OK for our purposes if the database doesn't exist; # knowing that proves we were able to connect return time.sleep(2) raise CommandError('Retries exceeded; failed to connect')
import time from django.core.management.base import BaseCommand, CommandError from django.db import connection from django.db.utils import OperationalError class Command(BaseCommand): help = ( 'Repeatedly attempt to connect to the default database, exiting ' 'silently once the connection succeeds, or with an error if a ' 'connection cannot be established' ) def add_arguments(self, parser): parser.add_argument( '--retries', default=5, type=int, help=( 'Try this many times before giving up, waiting 2 seconds ' 'between each attempt' ), ) def handle(self, *args, **options): for _ in range(options.get('retries')): try: with connection.cursor() as cursor: return except OperationalError as e: if str(e).strip().endswith('does not exist'): # OK for our purposes if the database doesn't exist; # knowing that proves we were able to connect return time.sleep(2) raise CommandError( 'Retries exceeded; failed to connect to default database' )
Make database connection error more descriptive
Make database connection error more descriptive
Python
agpl-3.0
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
python
## Code Before: import time from django.core.management.base import BaseCommand, CommandError from django.db import connection from django.db.utils import OperationalError class Command(BaseCommand): help = ( 'Repeatedly attempt to connect to the default database, exiting ' 'silently once the connection succeeds, or with an error if a ' 'connection cannot be established' ) def add_arguments(self, parser): parser.add_argument( '--retries', default=5, type=int, help=( 'Try this many times before giving up, waiting 2 seconds ' 'between each attempt' ), ) def handle(self, *args, **options): for _ in range(options.get('retries')): try: with connection.cursor() as cursor: return except OperationalError as e: if str(e).strip().endswith('does not exist'): # OK for our purposes if the database doesn't exist; # knowing that proves we were able to connect return time.sleep(2) raise CommandError('Retries exceeded; failed to connect') ## Instruction: Make database connection error more descriptive ## Code After: import time from django.core.management.base import BaseCommand, CommandError from django.db import connection from django.db.utils import OperationalError class Command(BaseCommand): help = ( 'Repeatedly attempt to connect to the default database, exiting ' 'silently once the connection succeeds, or with an error if a ' 'connection cannot be established' ) def add_arguments(self, parser): parser.add_argument( '--retries', default=5, type=int, help=( 'Try this many times before giving up, waiting 2 seconds ' 'between each attempt' ), ) def handle(self, *args, **options): for _ in range(options.get('retries')): try: with connection.cursor() as cursor: return except OperationalError as e: if str(e).strip().endswith('does not exist'): # OK for our purposes if the database doesn't exist; # knowing that proves we were able to connect return time.sleep(2) raise CommandError( 'Retries exceeded; failed to connect to default database' )
// ... existing code ... return time.sleep(2) raise CommandError( 'Retries exceeded; failed to connect to default database' ) // ... rest of the code ...
71c47c8374cf6c5f53cdfbb71763f165bcd6c013
oneflow/base/tests/__init__.py
oneflow/base/tests/__init__.py
import redis from mongoengine.connection import connect, disconnect from django.conf import settings TEST_REDIS = redis.StrictRedis(host=settings.REDIS_TEST_HOST, port=settings.REDIS_TEST_PORT, db=settings.REDIS_TEST_DB) def connect_mongodb_testsuite(): disconnect() connect('{0}_testsuite'.format(settings.MONGODB_NAME)) __all__ = ('TEST_REDIS', 'connect_mongodb_testsuite', )
import redis from mongoengine.connection import connect, disconnect from django.conf import settings TEST_REDIS = redis.StrictRedis(host=settings.REDIS_TEST_HOST, port=settings.REDIS_TEST_PORT, db=settings.REDIS_TEST_DB) def connect_mongodb_testsuite(): disconnect() connect('{0}_testsuite'.format(settings.MONGODB_NAME), tz_aware=settings.USE_TZ) __all__ = ('TEST_REDIS', 'connect_mongodb_testsuite', )
Make the test MongoDB database TZ aware like the production one, else some date comparisons fail, whereas they succeed in production.
Make the test MongoDB database TZ aware like the production one, else some date comparisons fail, whereas they succeed in production.
Python
agpl-3.0
1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow
python
## Code Before: import redis from mongoengine.connection import connect, disconnect from django.conf import settings TEST_REDIS = redis.StrictRedis(host=settings.REDIS_TEST_HOST, port=settings.REDIS_TEST_PORT, db=settings.REDIS_TEST_DB) def connect_mongodb_testsuite(): disconnect() connect('{0}_testsuite'.format(settings.MONGODB_NAME)) __all__ = ('TEST_REDIS', 'connect_mongodb_testsuite', ) ## Instruction: Make the test MongoDB database TZ aware like the production one, else some date comparisons fail, whereas they succeed in production. ## Code After: import redis from mongoengine.connection import connect, disconnect from django.conf import settings TEST_REDIS = redis.StrictRedis(host=settings.REDIS_TEST_HOST, port=settings.REDIS_TEST_PORT, db=settings.REDIS_TEST_DB) def connect_mongodb_testsuite(): disconnect() connect('{0}_testsuite'.format(settings.MONGODB_NAME), tz_aware=settings.USE_TZ) __all__ = ('TEST_REDIS', 'connect_mongodb_testsuite', )
... def connect_mongodb_testsuite(): disconnect() connect('{0}_testsuite'.format(settings.MONGODB_NAME), tz_aware=settings.USE_TZ) __all__ = ('TEST_REDIS', 'connect_mongodb_testsuite', ) ...
3cc90bb8ccce7b2feefe95173f095a71996d2633
setup.py
setup.py
from distutils.core import setup, Command class TestDiscovery(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys, subprocess errno = subprocess.call([ sys.executable, '-m', 'unittest', 'discover', '-p', '*.py', 'tests', ]) raise SystemExit(errno) setup(name='steel', version='0.1', description='A Python framework for describing binary file formats', author='Marty Alchin', author_email='[email protected]', url='https://github.com/gulopine/steel', packages=['steel', 'steel.bits', 'steel.chunks', 'steel.common', 'steel.fields'], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: System :: Filesystems', ], cmdclass={'test': TestDiscovery}, )
from distutils.core import setup, Command class TestDiscovery(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys, subprocess errno = subprocess.call([ sys.executable, '-m', 'unittest', 'discover', '-p', '*.py', 'tests', ]) raise SystemExit(errno) setup(name='steel', version='0.2', description='A Python framework for describing binary file formats', author='Marty Alchin', author_email='[email protected]', url='https://github.com/gulopine/steel', packages=['steel', 'steel.bits', 'steel.chunks', 'steel.common', 'steel.fields'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: System :: Filesystems', ], cmdclass={'test': TestDiscovery}, )
Update to version 0.2 and some trove classifiers
Update to version 0.2 and some trove classifiers
Python
bsd-3-clause
gulopine/steel
python
## Code Before: from distutils.core import setup, Command class TestDiscovery(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys, subprocess errno = subprocess.call([ sys.executable, '-m', 'unittest', 'discover', '-p', '*.py', 'tests', ]) raise SystemExit(errno) setup(name='steel', version='0.1', description='A Python framework for describing binary file formats', author='Marty Alchin', author_email='[email protected]', url='https://github.com/gulopine/steel', packages=['steel', 'steel.bits', 'steel.chunks', 'steel.common', 'steel.fields'], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: System :: Filesystems', ], cmdclass={'test': TestDiscovery}, ) ## Instruction: Update to version 0.2 and some trove classifiers ## Code After: from distutils.core import setup, Command class TestDiscovery(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys, subprocess errno = subprocess.call([ sys.executable, '-m', 'unittest', 'discover', '-p', '*.py', 'tests', ]) raise SystemExit(errno) setup(name='steel', version='0.2', description='A Python framework for describing binary file formats', author='Marty Alchin', author_email='[email protected]', url='https://github.com/gulopine/steel', packages=['steel', 'steel.bits', 'steel.chunks', 'steel.common', 'steel.fields'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: System :: Filesystems', ], cmdclass={'test': TestDiscovery}, )
... setup(name='steel', version='0.2', description='A Python framework for describing binary file formats', author='Marty Alchin', author_email='[email protected]', ... url='https://github.com/gulopine/steel', packages=['steel', 'steel.bits', 'steel.chunks', 'steel.common', 'steel.fields'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: System :: Filesystems', ], ...
7a33e2e94e46dc3465a088cf4755134a09f6627c
src/models/user.py
src/models/user.py
from flock import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) def __init__(self): pass
from flock import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) suggestions = db.relationship('Suggestion', secondary=suggestions, backref=db.backref('users', lazy='dynamic')) def __init__(self): pass
Add suggestions to User model
Add suggestions to User model
Python
agpl-3.0
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
python
## Code Before: from flock import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) def __init__(self): pass ## Instruction: Add suggestions to User model ## Code After: from flock import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) suggestions = db.relationship('Suggestion', secondary=suggestions, backref=db.backref('users', lazy='dynamic')) def __init__(self): pass
# ... existing code ... class User(db.Model): id = db.Column(db.Integer, primary_key=True) suggestions = db.relationship('Suggestion', secondary=suggestions, backref=db.backref('users', lazy='dynamic')) def __init__(self): pass # ... rest of the code ...
fbb9d30f44ad3373b96ef0319a4fb404b60be5de
critical-mutex.c
critical-mutex.c
static uv_mutex_t mutex; static uv_thread_t thread; static int crit_data = 0; static void thread_cb(void* arg) { uv_mutex_lock(&mutex); printf("thread mutex start\n"); crit_data = 2; printf("thread mutex end\n"); uv_mutex_unlock(&mutex); } int main() { assert(0 == uv_mutex_init(&mutex)); assert(0 == uv_thread_create(&thread, thread_cb, NULL)); uv_mutex_lock(&mutex); printf("main mutex start\n"); sleep(1); crit_data = 1; printf("main mutex end\n"); uv_mutex_unlock(&mutex); uv_thread_join(&thread); uv_mutex_destroy(&mutex); return 0; }
static uv_mutex_t mutex; static uv_thread_t thread; static void thread_cb(void* arg) { printf("thread_cb\n"); uv_mutex_lock(&mutex); printf("thread mutex\n"); uv_mutex_unlock(&mutex); } int main() { assert(0 == uv_mutex_init(&mutex)); assert(0 == uv_thread_create(&thread, thread_cb, NULL)); uv_mutex_lock(&mutex); printf("main mutex start\n"); sleep(1); printf("main mutex end\n"); uv_mutex_unlock(&mutex); uv_thread_join(&thread); uv_mutex_destroy(&mutex); return 0; }
Make use of mutex easier to see
Make use of mutex easier to see With the access to the data, the use of the actual mutex could have been confused. By simply using the mutex the example is a lot more clear.
C
mit
trevnorris/libuv-examples
c
## Code Before: static uv_mutex_t mutex; static uv_thread_t thread; static int crit_data = 0; static void thread_cb(void* arg) { uv_mutex_lock(&mutex); printf("thread mutex start\n"); crit_data = 2; printf("thread mutex end\n"); uv_mutex_unlock(&mutex); } int main() { assert(0 == uv_mutex_init(&mutex)); assert(0 == uv_thread_create(&thread, thread_cb, NULL)); uv_mutex_lock(&mutex); printf("main mutex start\n"); sleep(1); crit_data = 1; printf("main mutex end\n"); uv_mutex_unlock(&mutex); uv_thread_join(&thread); uv_mutex_destroy(&mutex); return 0; } ## Instruction: Make use of mutex easier to see With the access to the data, the use of the actual mutex could have been confused. By simply using the mutex the example is a lot more clear. ## Code After: static uv_mutex_t mutex; static uv_thread_t thread; static void thread_cb(void* arg) { printf("thread_cb\n"); uv_mutex_lock(&mutex); printf("thread mutex\n"); uv_mutex_unlock(&mutex); } int main() { assert(0 == uv_mutex_init(&mutex)); assert(0 == uv_thread_create(&thread, thread_cb, NULL)); uv_mutex_lock(&mutex); printf("main mutex start\n"); sleep(1); printf("main mutex end\n"); uv_mutex_unlock(&mutex); uv_thread_join(&thread); uv_mutex_destroy(&mutex); return 0; }
... static uv_mutex_t mutex; static uv_thread_t thread; static void thread_cb(void* arg) { printf("thread_cb\n"); uv_mutex_lock(&mutex); printf("thread mutex\n"); uv_mutex_unlock(&mutex); } int main() { assert(0 == uv_mutex_init(&mutex)); ... uv_mutex_lock(&mutex); printf("main mutex start\n"); sleep(1); printf("main mutex end\n"); uv_mutex_unlock(&mutex); ...
5b4ba4e6cbb6cae1793c699a540aecb64236ca34
riot/app.py
riot/app.py
import urwid def run_tag(tag, *args, **kwargs): loop = urwid.MainLoop(tag, *args, **kwargs) loop.run() def quit_app(): raise urwid.ExitMainLoop()
import urwid def run_tag(tag, *args, **kwargs): loop = urwid.MainLoop(tag, *args, **kwargs) loop.screen.set_terminal_properties(colors=256) loop.run() def quit_app(): raise urwid.ExitMainLoop()
Set default property screen 256 colors.
Set default property screen 256 colors.
Python
mit
soasme/riotpy
python
## Code Before: import urwid def run_tag(tag, *args, **kwargs): loop = urwid.MainLoop(tag, *args, **kwargs) loop.run() def quit_app(): raise urwid.ExitMainLoop() ## Instruction: Set default property screen 256 colors. ## Code After: import urwid def run_tag(tag, *args, **kwargs): loop = urwid.MainLoop(tag, *args, **kwargs) loop.screen.set_terminal_properties(colors=256) loop.run() def quit_app(): raise urwid.ExitMainLoop()
... def run_tag(tag, *args, **kwargs): loop = urwid.MainLoop(tag, *args, **kwargs) loop.screen.set_terminal_properties(colors=256) loop.run() def quit_app(): ...
d448367d68e37c2d719063b8ec2ce543fec5b5e7
sphinxcontrib/reviewbuilder/__init__.py
sphinxcontrib/reviewbuilder/__init__.py
from __future__ import absolute_import from docutils.nodes import Text, paragraph from sphinxcontrib.reviewbuilder.reviewbuilder import ReVIEWBuilder # from japanesesupport.py def trunc_whitespace(app, doctree, docname): for node in doctree.traverse(Text): if isinstance(node.parent, paragraph): newtext = node.astext() for c in "\n\r\t": newtext = newtext.replace(c, "") newtext = newtext.strip() node.parent.replace(node, Text(newtext)) def setup(app): app.add_builder(ReVIEWBuilder) app.connect("doctree-resolved", trunc_whitespace)
from __future__ import absolute_import from docutils import nodes from sphinxcontrib.reviewbuilder.reviewbuilder import ReVIEWBuilder # from japanesesupport.py def trunc_whitespace(app, doctree, docname): for node in doctree.traverse(nodes.Text): if isinstance(node.parent, nodes.paragraph): newtext = node.astext() for c in "\n\r\t": newtext = newtext.replace(c, "") newtext = newtext.strip() node.parent.replace(node, nodes.Text(newtext)) def setup(app): app.add_builder(ReVIEWBuilder) app.connect("doctree-resolved", trunc_whitespace)
Access docutils standard nodes through nodes.*
Access docutils standard nodes through nodes.*
Python
lgpl-2.1
shirou/sphinxcontrib-reviewbuilder
python
## Code Before: from __future__ import absolute_import from docutils.nodes import Text, paragraph from sphinxcontrib.reviewbuilder.reviewbuilder import ReVIEWBuilder # from japanesesupport.py def trunc_whitespace(app, doctree, docname): for node in doctree.traverse(Text): if isinstance(node.parent, paragraph): newtext = node.astext() for c in "\n\r\t": newtext = newtext.replace(c, "") newtext = newtext.strip() node.parent.replace(node, Text(newtext)) def setup(app): app.add_builder(ReVIEWBuilder) app.connect("doctree-resolved", trunc_whitespace) ## Instruction: Access docutils standard nodes through nodes.* ## Code After: from __future__ import absolute_import from docutils import nodes from sphinxcontrib.reviewbuilder.reviewbuilder import ReVIEWBuilder # from japanesesupport.py def trunc_whitespace(app, doctree, docname): for node in doctree.traverse(nodes.Text): if isinstance(node.parent, nodes.paragraph): newtext = node.astext() for c in "\n\r\t": newtext = newtext.replace(c, "") newtext = newtext.strip() node.parent.replace(node, nodes.Text(newtext)) def setup(app): app.add_builder(ReVIEWBuilder) app.connect("doctree-resolved", trunc_whitespace)
... from __future__ import absolute_import from docutils import nodes from sphinxcontrib.reviewbuilder.reviewbuilder import ReVIEWBuilder ... # from japanesesupport.py def trunc_whitespace(app, doctree, docname): for node in doctree.traverse(nodes.Text): if isinstance(node.parent, nodes.paragraph): newtext = node.astext() for c in "\n\r\t": newtext = newtext.replace(c, "") newtext = newtext.strip() node.parent.replace(node, nodes.Text(newtext)) def setup(app): ...
ba4b348e03f5f875bb170a8b7d5c560ba7c6968f
features/groups/migrations/0002_auto_20160922_1108.py
features/groups/migrations/0002_auto_20160922_1108.py
from __future__ import unicode_literals from django.db import migrations def copy_groups(apps, schema_editor): Group1 = apps.get_model('entities.Group') Group2 = apps.get_model('groups.Group') for g in Group1.objects.all(): g2 = Group2.objects.create( name=g.name, slug=g.slug, address=g.address, avatar=g.avatar, avatar_color=g.avatar_color, date_founded=g.date_founded, description=g.description, logo=g.logo, url=g.url, closed=g.closed) g2.date_created = g.date_created g2.slug = g.slug g2.save() class Migration(migrations.Migration): dependencies = [ ('groups', '0001_initial'), ] operations = [ migrations.RunPython(copy_groups) ]
from __future__ import unicode_literals from django.db import migrations def copy_groups(apps, schema_editor): Group1 = apps.get_model('entities.Group') Group2 = apps.get_model('groups.Group') for g in Group1.objects.order_by('id'): g2 = Group2.objects.create( name=g.name, slug=g.slug, address=g.address, avatar=g.avatar, avatar_color=g.avatar_color, date_founded=g.date_founded, description=g.description, logo=g.logo, url=g.url, closed=g.closed) g2.date_created = g.date_created g2.slug = g.slug g2.save() class Migration(migrations.Migration): dependencies = [ ('groups', '0001_initial'), ] operations = [ migrations.RunPython(copy_groups) ]
Order groups by id when copying
Order groups by id when copying
Python
agpl-3.0
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
python
## Code Before: from __future__ import unicode_literals from django.db import migrations def copy_groups(apps, schema_editor): Group1 = apps.get_model('entities.Group') Group2 = apps.get_model('groups.Group') for g in Group1.objects.all(): g2 = Group2.objects.create( name=g.name, slug=g.slug, address=g.address, avatar=g.avatar, avatar_color=g.avatar_color, date_founded=g.date_founded, description=g.description, logo=g.logo, url=g.url, closed=g.closed) g2.date_created = g.date_created g2.slug = g.slug g2.save() class Migration(migrations.Migration): dependencies = [ ('groups', '0001_initial'), ] operations = [ migrations.RunPython(copy_groups) ] ## Instruction: Order groups by id when copying ## Code After: from __future__ import unicode_literals from django.db import migrations def copy_groups(apps, schema_editor): Group1 = apps.get_model('entities.Group') Group2 = apps.get_model('groups.Group') for g in Group1.objects.order_by('id'): g2 = Group2.objects.create( name=g.name, slug=g.slug, address=g.address, avatar=g.avatar, avatar_color=g.avatar_color, date_founded=g.date_founded, description=g.description, logo=g.logo, url=g.url, closed=g.closed) g2.date_created = g.date_created g2.slug = g.slug g2.save() class Migration(migrations.Migration): dependencies = [ ('groups', '0001_initial'), ] operations = [ migrations.RunPython(copy_groups) ]
... def copy_groups(apps, schema_editor): Group1 = apps.get_model('entities.Group') Group2 = apps.get_model('groups.Group') for g in Group1.objects.order_by('id'): g2 = Group2.objects.create( name=g.name, slug=g.slug, ...
63709e388ed86892e9771b86daad4a9ec3c0bd44
ibei/__init__.py
ibei/__init__.py
from .version import __version__ from .uibei import uibei from .sqsolarcell import SQSolarcell from .devossolarcell import DeVosSolarcell
from .version import __version__ from .uibei import uibei
Remove import of specific models
Remove import of specific models
Python
mit
jrsmith3/ibei
python
## Code Before: from .version import __version__ from .uibei import uibei from .sqsolarcell import SQSolarcell from .devossolarcell import DeVosSolarcell ## Instruction: Remove import of specific models ## Code After: from .version import __version__ from .uibei import uibei
# ... existing code ... from .version import __version__ from .uibei import uibei # ... rest of the code ...
c977e1c235ccb040f28bc03c63d2667924d5edd3
pythonforandroid/recipes/xeddsa/__init__.py
pythonforandroid/recipes/xeddsa/__init__.py
from pythonforandroid.recipe import CythonRecipe from pythonforandroid.toolchain import current_directory, shprint from os.path import join import sh class XedDSARecipe(CythonRecipe): name = 'xeddsa' version = '0.4.4' url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz' depends = [ 'setuptools', 'cffi', 'pynacl', ] patches = ['remove_dependencies.patch'] call_hostpython_via_targetpython = False def build_arch(self, arch): with current_directory(join(self.get_build_dir(arch.arch))): env = self.get_recipe_env(arch) hostpython = sh.Command(self.ctx.hostpython) shprint( hostpython, 'ref10/build.py', _env=env ) shprint(sh.cp, '_crypto_sign.so', self.ctx.get_site_packages_dir()) self.install_python_package(arch) recipe = XedDSARecipe()
from pythonforandroid.recipe import CythonRecipe from pythonforandroid.toolchain import current_directory, shprint from os.path import join import sh class XedDSARecipe(CythonRecipe): name = 'xeddsa' version = '0.4.4' url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz' depends = [ 'setuptools', 'cffi', 'pynacl', ] patches = ['remove_dependencies.patch'] call_hostpython_via_targetpython = False def build_arch(self, arch): with current_directory(join(self.get_build_dir(arch.arch))): env = self.get_recipe_env(arch) hostpython = sh.Command(self.ctx.hostpython) shprint( hostpython, 'ref10/build.py', _env=env ) # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` # or simply `_crypto_sign.so` depending on the platform/distribution sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir()) self.install_python_package(arch) recipe = XedDSARecipe()
Fix xeddsa crypto_sign shared lib copy
Fix xeddsa crypto_sign shared lib copy Could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` or simply `_crypto_sign.so` depending on the platform/distribution
Python
mit
germn/python-for-android,rnixx/python-for-android,rnixx/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,germn/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,kivy/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,germn/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,PKRoma/python-for-android
python
## Code Before: from pythonforandroid.recipe import CythonRecipe from pythonforandroid.toolchain import current_directory, shprint from os.path import join import sh class XedDSARecipe(CythonRecipe): name = 'xeddsa' version = '0.4.4' url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz' depends = [ 'setuptools', 'cffi', 'pynacl', ] patches = ['remove_dependencies.patch'] call_hostpython_via_targetpython = False def build_arch(self, arch): with current_directory(join(self.get_build_dir(arch.arch))): env = self.get_recipe_env(arch) hostpython = sh.Command(self.ctx.hostpython) shprint( hostpython, 'ref10/build.py', _env=env ) shprint(sh.cp, '_crypto_sign.so', self.ctx.get_site_packages_dir()) self.install_python_package(arch) recipe = XedDSARecipe() ## Instruction: Fix xeddsa crypto_sign shared lib copy Could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` or simply `_crypto_sign.so` depending on the platform/distribution ## Code After: from pythonforandroid.recipe import CythonRecipe from pythonforandroid.toolchain import current_directory, shprint from os.path import join import sh class XedDSARecipe(CythonRecipe): name = 'xeddsa' version = '0.4.4' url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz' depends = [ 'setuptools', 'cffi', 'pynacl', ] patches = ['remove_dependencies.patch'] call_hostpython_via_targetpython = False def build_arch(self, arch): with current_directory(join(self.get_build_dir(arch.arch))): env = self.get_recipe_env(arch) hostpython = sh.Command(self.ctx.hostpython) shprint( hostpython, 'ref10/build.py', _env=env ) # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` # or simply `_crypto_sign.so` depending on the platform/distribution sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir()) self.install_python_package(arch) recipe = XedDSARecipe()
# ... existing code ... hostpython, 'ref10/build.py', _env=env ) # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` # or simply `_crypto_sign.so` depending on the platform/distribution sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir()) self.install_python_package(arch) # ... rest of the code ...
f936e1b869713c43d260827f46b8e819b02ed741
src/main/java/net/md_5/bungee/command/CommandAlert.java
src/main/java/net/md_5/bungee/command/CommandAlert.java
package net.md_5.bungee.command; import net.md_5.bungee.BungeeCord; import net.md_5.bungee.ChatColor; import net.md_5.bungee.Permission; import net.md_5.bungee.UserConnection; public class CommandAlert extends Command { @Override public void execute(CommandSender sender, String[] args) { if (getPermission(sender) != Permission.ADMIN) { sender.sendMessage(ChatColor.RED + "You do not have permission to execute this command!"); return; } if (args.length == 0) { sender.sendMessage(ChatColor.RED + "You must supply a message."); } else { StringBuilder builder = new StringBuilder(); if (!args[0].contains("&h")) //They want to hide the alert prefix { builder.append(ChatColor.DARK_PURPLE); builder.append("[Alert] "); //No space at start. } else { args[0].replaceAll("&h", ""); //Remove hide control code from message } for (String s : args) { builder.append(ChatColor.translateAlternateColorCodes('&', s)); //Allow custom colours builder.append(" "); } String message = builder.substring(0, builder.length() - 1); for (UserConnection con : BungeeCord.instance.connections.values()) { con.sendMessage(message); } } } }
package net.md_5.bungee.command; import net.md_5.bungee.BungeeCord; import net.md_5.bungee.ChatColor; import net.md_5.bungee.Permission; import net.md_5.bungee.UserConnection; public class CommandAlert extends Command { @Override public void execute(CommandSender sender, String[] args) { if (getPermission(sender) != Permission.ADMIN) { sender.sendMessage(ChatColor.RED + "You do not have permission to execute this command!"); return; } if (args.length == 0) { sender.sendMessage(ChatColor.RED + "You must supply a message."); } else { StringBuilder builder = new StringBuilder(); if (!args[0].contains("&h")) //They want to hide the alert prefix { builder.append(ChatColor.DARK_PURPLE); builder.append("[Alert] "); //No space at start. } for (String s : args) { s = s.replaceAll("&h", ""); //Fix replace builder.append(ChatColor.translateAlternateColorCodes('&', s)); //Allow custom colours builder.append(" "); } String message = builder.substring(0, builder.length() - 1); for (UserConnection con : BungeeCord.instance.connections.values()) { con.sendMessage(message); } } } }
Fix &h not being removed from the message properly
Fix &h not being removed from the message properly
Java
bsd-3-clause
dentmaged/BungeeCord,LinEvil/BungeeCord,BlueAnanas/BungeeCord,ewized/BungeeCord,starlis/BungeeCord,dentmaged/BungeeCord,mariolars/BungeeCord,xxyy/BungeeCord,Xetius/BungeeCord,LetsPlayOnline/BungeeJumper,starlis/BungeeCord,GingerGeek/BungeeCord,BlueAnanas/BungeeCord,Yive/BungeeCord,GamesConMCGames/Bungeecord,ewized/BungeeCord,ewized/BungeeCord,XMeowTW/BungeeCord,Yive/BungeeCord,TCPR/BungeeCord,GamesConMCGames/Bungeecord,Xetius/BungeeCord,xxyy/BungeeCord,XMeowTW/BungeeCord,TCPR/BungeeCord,ConnorLinfoot/BungeeCord,LetsPlayOnline/BungeeJumper,ConnorLinfoot/BungeeCord,mariolars/BungeeCord,ConnorLinfoot/BungeeCord,BlueAnanas/BungeeCord,Yive/BungeeCord,GamesConMCGames/Bungeecord,PrisonPvP/BungeeCord,LolnetModPack/BungeeCord,LinEvil/BungeeCord,LolnetModPack/BungeeCord,btilm305/BungeeCord,xxyy/BungeeCord,Xetius/BungeeCord,LolnetModPack/BungeeCord,TCPR/BungeeCord,LinEvil/BungeeCord,GingerGeek/BungeeCord,btilm305/BungeeCord,btilm305/BungeeCord,mariolars/BungeeCord,PrisonPvP/BungeeCord,GingerGeek/BungeeCord,XMeowTW/BungeeCord,LetsPlayOnline/BungeeJumper,starlis/BungeeCord,PrisonPvP/BungeeCord,dentmaged/BungeeCord
java
## Code Before: package net.md_5.bungee.command; import net.md_5.bungee.BungeeCord; import net.md_5.bungee.ChatColor; import net.md_5.bungee.Permission; import net.md_5.bungee.UserConnection; public class CommandAlert extends Command { @Override public void execute(CommandSender sender, String[] args) { if (getPermission(sender) != Permission.ADMIN) { sender.sendMessage(ChatColor.RED + "You do not have permission to execute this command!"); return; } if (args.length == 0) { sender.sendMessage(ChatColor.RED + "You must supply a message."); } else { StringBuilder builder = new StringBuilder(); if (!args[0].contains("&h")) //They want to hide the alert prefix { builder.append(ChatColor.DARK_PURPLE); builder.append("[Alert] "); //No space at start. } else { args[0].replaceAll("&h", ""); //Remove hide control code from message } for (String s : args) { builder.append(ChatColor.translateAlternateColorCodes('&', s)); //Allow custom colours builder.append(" "); } String message = builder.substring(0, builder.length() - 1); for (UserConnection con : BungeeCord.instance.connections.values()) { con.sendMessage(message); } } } } ## Instruction: Fix &h not being removed from the message properly ## Code After: package net.md_5.bungee.command; import net.md_5.bungee.BungeeCord; import net.md_5.bungee.ChatColor; import net.md_5.bungee.Permission; import net.md_5.bungee.UserConnection; public class CommandAlert extends Command { @Override public void execute(CommandSender sender, String[] args) { if (getPermission(sender) != Permission.ADMIN) { sender.sendMessage(ChatColor.RED + "You do not have permission to execute this command!"); return; } if (args.length == 0) { sender.sendMessage(ChatColor.RED + "You must supply a message."); } else { StringBuilder builder = new StringBuilder(); if (!args[0].contains("&h")) //They want to hide the alert prefix { builder.append(ChatColor.DARK_PURPLE); builder.append("[Alert] "); //No space at start. } for (String s : args) { s = s.replaceAll("&h", ""); //Fix replace builder.append(ChatColor.translateAlternateColorCodes('&', s)); //Allow custom colours builder.append(" "); } String message = builder.substring(0, builder.length() - 1); for (UserConnection con : BungeeCord.instance.connections.values()) { con.sendMessage(message); } } } }
// ... existing code ... StringBuilder builder = new StringBuilder(); if (!args[0].contains("&h")) //They want to hide the alert prefix { builder.append(ChatColor.DARK_PURPLE); builder.append("[Alert] "); //No space at start. } for (String s : args) { s = s.replaceAll("&h", ""); //Fix replace builder.append(ChatColor.translateAlternateColorCodes('&', s)); //Allow custom colours builder.append(" "); } String message = builder.substring(0, builder.length() - 1); for (UserConnection con : BungeeCord.instance.connections.values()) { // ... rest of the code ...
6722e16aef43f9cfe03e7e76fc578582139721f6
vint/linting/env.py
vint/linting/env.py
import os import os.path import re import logging from pathlib import Path VIM_SCRIPT_FILE_NAME_PATTERNS = r'(?:[\._]g?vimrc|.*\.vim$)' def build_environment(cmdargs): return { 'cmdargs': cmdargs, 'home_path': _get_home_path(cmdargs), 'cwd': _get_cwd(cmdargs), 'file_paths': _get_file_paths(cmdargs) } def _get_cwd(cmdargs): return Path(os.getcwd()) def _get_home_path(cmdargs): return Path(os.path.expanduser('~')) def _get_file_paths(cmdargs): if 'files' not in cmdargs: return [] found_files = _collect_files([Path(path) for path in cmdargs['files']]) return found_files def _collect_files(paths): result = set() for path in paths: if path.is_dir(): dir_path = path result |= _collect_files(tuple(dir_path.iterdir())) elif _is_vim_script(path): file_path = path result.add(file_path) else: logging.debug('ignore not Vim script file: `{file_path}`'.format( file_path=str(path))) return result def _is_vim_script(path): file_name = path.name return bool(re.search(VIM_SCRIPT_FILE_NAME_PATTERNS, file_name))
import os import os.path from pathlib import Path from vint.linting.file_filter import find_vim_script def build_environment(cmdargs): return { 'cmdargs': cmdargs, 'home_path': _get_home_path(cmdargs), 'cwd': _get_cwd(cmdargs), 'file_paths': _get_file_paths(cmdargs) } def _get_cwd(cmdargs): return Path(os.getcwd()) def _get_home_path(cmdargs): return Path(os.path.expanduser('~')) def _get_file_paths(cmdargs): if 'files' not in cmdargs: return [] found_file_paths = find_vim_script(map(Path, cmdargs['files'])) return set(found_file_paths)
Split file collecting algorithm to FileFilter
Split file collecting algorithm to FileFilter
Python
mit
Kuniwak/vint,RianFuro/vint,RianFuro/vint,Kuniwak/vint
python
## Code Before: import os import os.path import re import logging from pathlib import Path VIM_SCRIPT_FILE_NAME_PATTERNS = r'(?:[\._]g?vimrc|.*\.vim$)' def build_environment(cmdargs): return { 'cmdargs': cmdargs, 'home_path': _get_home_path(cmdargs), 'cwd': _get_cwd(cmdargs), 'file_paths': _get_file_paths(cmdargs) } def _get_cwd(cmdargs): return Path(os.getcwd()) def _get_home_path(cmdargs): return Path(os.path.expanduser('~')) def _get_file_paths(cmdargs): if 'files' not in cmdargs: return [] found_files = _collect_files([Path(path) for path in cmdargs['files']]) return found_files def _collect_files(paths): result = set() for path in paths: if path.is_dir(): dir_path = path result |= _collect_files(tuple(dir_path.iterdir())) elif _is_vim_script(path): file_path = path result.add(file_path) else: logging.debug('ignore not Vim script file: `{file_path}`'.format( file_path=str(path))) return result def _is_vim_script(path): file_name = path.name return bool(re.search(VIM_SCRIPT_FILE_NAME_PATTERNS, file_name)) ## Instruction: Split file collecting algorithm to FileFilter ## Code After: import os import os.path from pathlib import Path from vint.linting.file_filter import find_vim_script def build_environment(cmdargs): return { 'cmdargs': cmdargs, 'home_path': _get_home_path(cmdargs), 'cwd': _get_cwd(cmdargs), 'file_paths': _get_file_paths(cmdargs) } def _get_cwd(cmdargs): return Path(os.getcwd()) def _get_home_path(cmdargs): return Path(os.path.expanduser('~')) def _get_file_paths(cmdargs): if 'files' not in cmdargs: return [] found_file_paths = find_vim_script(map(Path, cmdargs['files'])) return set(found_file_paths)
# ... existing code ... import os import os.path from pathlib import Path from vint.linting.file_filter import find_vim_script def build_environment(cmdargs): # ... modified code ... if 'files' not in cmdargs: return [] found_file_paths = find_vim_script(map(Path, cmdargs['files'])) return set(found_file_paths) # ... rest of the code ...
9b54d728a245855cba724a91d372a15a4f4abb6d
shop/checkout/models.py
shop/checkout/models.py
"""Checkout Models""" import functools from flask import redirect, url_for from fulfil_client.model import ModelType, StringType from shop.fulfilio import Model from shop.globals import current_cart, current_channel def not_empty_cart(function): @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.is_empty: return redirect(url_for('cart.view_cart')) return function(*args, **kwargs) return wrapper def sale_has_non_guest_party(function): """ Ensure that the sale has a party who is not guest. The sign-in method authomatically changes the party to a party based on the session. """ @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.sale and cart.sale.party and \ cart.sale.party.id == current_channel.anonymous_customer.id: return redirect(url_for('checkout.sign_in')) return function(*args, **kwargs) return wrapper class PaymentGateway(Model): __model_name__ = 'payment_gateway.gateway' provider = StringType() stripe_publishable_key = StringType() class PaymentProfile(Model): __model_name__ = 'party.payment_profile' party = ModelType('party.party') gateway = ModelType('payment_gateway.gateway') last_4_digits = StringType() rec_name = StringType()
"""Checkout Models""" import functools from flask import redirect, url_for from fulfil_client.model import ModelType, StringType from shop.fulfilio import Model from shop.globals import current_cart, current_channel def not_empty_cart(function): @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.is_empty: return redirect(url_for('cart.view_cart')) return function(*args, **kwargs) return wrapper def sale_has_non_guest_party(function): """ Ensure that the sale has a party who is not guest. The sign-in method authomatically changes the party to a party based on the session. """ @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.sale and cart.sale.party and \ cart.sale.party.id == current_channel.anonymous_customer.id: return redirect(url_for('checkout.sign_in')) return function(*args, **kwargs) return wrapper class PaymentGateway(Model): __model_name__ = 'payment_gateway.gateway' provider = StringType() stripe_publishable_key = StringType() class PaymentProfile(Model): __model_name__ = 'party.payment_profile' party = ModelType('party.party') gateway = ModelType('payment_gateway.gateway') last_4_digits = StringType() expiry_month = StringType() expiry_year = StringType() rec_name = StringType()
Add expiry fields on card model
Add expiry fields on card model
Python
bsd-3-clause
joeirimpan/shop,joeirimpan/shop,joeirimpan/shop
python
## Code Before: """Checkout Models""" import functools from flask import redirect, url_for from fulfil_client.model import ModelType, StringType from shop.fulfilio import Model from shop.globals import current_cart, current_channel def not_empty_cart(function): @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.is_empty: return redirect(url_for('cart.view_cart')) return function(*args, **kwargs) return wrapper def sale_has_non_guest_party(function): """ Ensure that the sale has a party who is not guest. The sign-in method authomatically changes the party to a party based on the session. """ @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.sale and cart.sale.party and \ cart.sale.party.id == current_channel.anonymous_customer.id: return redirect(url_for('checkout.sign_in')) return function(*args, **kwargs) return wrapper class PaymentGateway(Model): __model_name__ = 'payment_gateway.gateway' provider = StringType() stripe_publishable_key = StringType() class PaymentProfile(Model): __model_name__ = 'party.payment_profile' party = ModelType('party.party') gateway = ModelType('payment_gateway.gateway') last_4_digits = StringType() rec_name = StringType() ## Instruction: Add expiry fields on card model ## Code After: """Checkout Models""" import functools from flask import redirect, url_for from fulfil_client.model import ModelType, StringType from shop.fulfilio import Model from shop.globals import current_cart, current_channel def not_empty_cart(function): @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.is_empty: return redirect(url_for('cart.view_cart')) return function(*args, **kwargs) return wrapper def sale_has_non_guest_party(function): """ Ensure that the sale has a party who is not guest. The sign-in method authomatically changes the party to a party based on the session. """ @functools.wraps(function) def wrapper(*args, **kwargs): cart = current_cart if cart.sale and cart.sale.party and \ cart.sale.party.id == current_channel.anonymous_customer.id: return redirect(url_for('checkout.sign_in')) return function(*args, **kwargs) return wrapper class PaymentGateway(Model): __model_name__ = 'payment_gateway.gateway' provider = StringType() stripe_publishable_key = StringType() class PaymentProfile(Model): __model_name__ = 'party.payment_profile' party = ModelType('party.party') gateway = ModelType('payment_gateway.gateway') last_4_digits = StringType() expiry_month = StringType() expiry_year = StringType() rec_name = StringType()
// ... existing code ... party = ModelType('party.party') gateway = ModelType('payment_gateway.gateway') last_4_digits = StringType() expiry_month = StringType() expiry_year = StringType() rec_name = StringType() // ... rest of the code ...
a0c2e64c92d89276d73b5e4ca31e10a352ab37f1
analyser/api.py
analyser/api.py
import os import requests from flask import Blueprint from utils.decorators import validate, require from utils.validators import validate_url from .parser import Parser endpoint = Blueprint('analyse_url', __name__) @endpoint.route('analyse/', methods=['POST']) @require('url') @validate({ 'url': validate_url }) def analyse_url(url): name, ext = os.path.splitext(url) parse = Parser(ext=ext[1:]) response = requests.get(url, stream=True) fields = [] for chunk in response.iter_lines(1024): fields = parse(chunk) if fields: break print fields return url
import os import json import requests import rethinkdb as r from flask import Blueprint, current_app from utils.decorators import validate, require from utils.validators import validate_url from krunchr.vendors.rethinkdb import db from .parser import Parser from .tasks import get_file endpoint = Blueprint('analyse_url', __name__) @endpoint.route('analyse/', methods=['POST']) @require('url') @validate({ 'url': validate_url }) def analyse_url(url): name, ext = os.path.splitext(url) parse = Parser(ext=ext[1:]) response = requests.get(url, stream=True) fields = [] for chunk in response.iter_lines(1024): fields = parse(chunk) if fields: break task_id = get_file.delay(url, current_app.config['DISCO_FILES']).task_id r.table('jobs').insert({ 'url': url, 'task_id': task_id, 'state': 'starting' }).run(db.conn) return json.dumps(fields)
Put job id in rethink db
Put job id in rethink db
Python
apache-2.0
vtemian/kruncher
python
## Code Before: import os import requests from flask import Blueprint from utils.decorators import validate, require from utils.validators import validate_url from .parser import Parser endpoint = Blueprint('analyse_url', __name__) @endpoint.route('analyse/', methods=['POST']) @require('url') @validate({ 'url': validate_url }) def analyse_url(url): name, ext = os.path.splitext(url) parse = Parser(ext=ext[1:]) response = requests.get(url, stream=True) fields = [] for chunk in response.iter_lines(1024): fields = parse(chunk) if fields: break print fields return url ## Instruction: Put job id in rethink db ## Code After: import os import json import requests import rethinkdb as r from flask import Blueprint, current_app from utils.decorators import validate, require from utils.validators import validate_url from krunchr.vendors.rethinkdb import db from .parser import Parser from .tasks import get_file endpoint = Blueprint('analyse_url', __name__) @endpoint.route('analyse/', methods=['POST']) @require('url') @validate({ 'url': validate_url }) def analyse_url(url): name, ext = os.path.splitext(url) parse = Parser(ext=ext[1:]) response = requests.get(url, stream=True) fields = [] for chunk in response.iter_lines(1024): fields = parse(chunk) if fields: break task_id = get_file.delay(url, current_app.config['DISCO_FILES']).task_id r.table('jobs').insert({ 'url': url, 'task_id': task_id, 'state': 'starting' }).run(db.conn) return json.dumps(fields)
... import os import json import requests import rethinkdb as r from flask import Blueprint, current_app from utils.decorators import validate, require from utils.validators import validate_url from krunchr.vendors.rethinkdb import db from .parser import Parser from .tasks import get_file endpoint = Blueprint('analyse_url', __name__) ... if fields: break task_id = get_file.delay(url, current_app.config['DISCO_FILES']).task_id r.table('jobs').insert({ 'url': url, 'task_id': task_id, 'state': 'starting' }).run(db.conn) return json.dumps(fields) ...
a0443783c880cf90b11886e3180e842e2c17a77a
tests/gtype.py
tests/gtype.py
import unittest from common import gobject, gtk class GTypeTest(unittest.TestCase): def testBoolType(self): store = gtk.ListStore(gobject.TYPE_BOOLEAN) assert store.get_column_type(0) == gobject.TYPE_BOOLEAN store = gtk.ListStore('gboolean') assert store.get_column_type(0) == gobject.TYPE_BOOLEAN store = gtk.ListStore(bool) assert store.get_column_type(0) == gobject.TYPE_BOOLEAN if __name__ == '__main__': unittest.main()
import unittest from common import gobject, gtk class GTypeTest(unittest.TestCase): def checkType(self, expected, *objects): # Silly method to check pyg_type_from_object store = gtk.ListStore(expected) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) for object in objects: store = gtk.ListStore(object) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) def testBool(self): self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool) def testInt(self): self.checkType(gobject.TYPE_INT, 'gint', int) def testInt64(self): self.checkType(gobject.TYPE_INT64, 'gint64') def testUint(self): self.checkType(gobject.TYPE_UINT, 'guint') def testUint64(self): self.checkType(gobject.TYPE_UINT64, 'guint64') def testLong(self): self.checkType(gobject.TYPE_LONG, 'glong', long) def testUlong(self): self.checkType(gobject.TYPE_ULONG, 'gulong') def testDouble(self): self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float) def testFloat(self): self.checkType(gobject.TYPE_FLOAT, 'gfloat') def testPyObject(self): self.checkType(gobject.TYPE_PYOBJECT, object) def testObject(self): self.checkType(gobject.TYPE_OBJECT) # XXX: Flags, Enums if __name__ == '__main__': unittest.main()
Test various other types aswell
Test various other types aswell
Python
lgpl-2.1
thiblahute/pygobject,atizo/pygobject,atizo/pygobject,nzjrs/pygobject,choeger/pygobject-cmake,Distrotech/pygobject,pexip/pygobject,davidmalcolm/pygobject,jdahlin/pygobject,MathieuDuponchelle/pygobject,GNOME/pygobject,davidmalcolm/pygobject,thiblahute/pygobject,davibe/pygobject,davibe/pygobject,jdahlin/pygobject,pexip/pygobject,alexef/pygobject,sfeltman/pygobject,sfeltman/pygobject,alexef/pygobject,GNOME/pygobject,Distrotech/pygobject,thiblahute/pygobject,atizo/pygobject,jdahlin/pygobject,davibe/pygobject,alexef/pygobject,Distrotech/pygobject,Distrotech/pygobject,MathieuDuponchelle/pygobject,davidmalcolm/pygobject,MathieuDuponchelle/pygobject,sfeltman/pygobject,nzjrs/pygobject,pexip/pygobject,davibe/pygobject,GNOME/pygobject,choeger/pygobject-cmake,choeger/pygobject-cmake,nzjrs/pygobject
python
## Code Before: import unittest from common import gobject, gtk class GTypeTest(unittest.TestCase): def testBoolType(self): store = gtk.ListStore(gobject.TYPE_BOOLEAN) assert store.get_column_type(0) == gobject.TYPE_BOOLEAN store = gtk.ListStore('gboolean') assert store.get_column_type(0) == gobject.TYPE_BOOLEAN store = gtk.ListStore(bool) assert store.get_column_type(0) == gobject.TYPE_BOOLEAN if __name__ == '__main__': unittest.main() ## Instruction: Test various other types aswell ## Code After: import unittest from common import gobject, gtk class GTypeTest(unittest.TestCase): def checkType(self, expected, *objects): # Silly method to check pyg_type_from_object store = gtk.ListStore(expected) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) for object in objects: store = gtk.ListStore(object) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) def testBool(self): self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool) def testInt(self): self.checkType(gobject.TYPE_INT, 'gint', int) def testInt64(self): self.checkType(gobject.TYPE_INT64, 'gint64') def testUint(self): self.checkType(gobject.TYPE_UINT, 'guint') def testUint64(self): self.checkType(gobject.TYPE_UINT64, 'guint64') def testLong(self): self.checkType(gobject.TYPE_LONG, 'glong', long) def testUlong(self): self.checkType(gobject.TYPE_ULONG, 'gulong') def testDouble(self): self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float) def testFloat(self): self.checkType(gobject.TYPE_FLOAT, 'gfloat') def testPyObject(self): self.checkType(gobject.TYPE_PYOBJECT, object) def testObject(self): self.checkType(gobject.TYPE_OBJECT) # XXX: Flags, Enums if __name__ == '__main__': unittest.main()
// ... existing code ... from common import gobject, gtk class GTypeTest(unittest.TestCase): def checkType(self, expected, *objects): # Silly method to check pyg_type_from_object store = gtk.ListStore(expected) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) for object in objects: store = gtk.ListStore(object) val = store.get_column_type(0) assert val == expected, \ 'got %r while %r was expected' % (val, expected) def testBool(self): self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool) def testInt(self): self.checkType(gobject.TYPE_INT, 'gint', int) def testInt64(self): self.checkType(gobject.TYPE_INT64, 'gint64') def testUint(self): self.checkType(gobject.TYPE_UINT, 'guint') def testUint64(self): self.checkType(gobject.TYPE_UINT64, 'guint64') def testLong(self): self.checkType(gobject.TYPE_LONG, 'glong', long) def testUlong(self): self.checkType(gobject.TYPE_ULONG, 'gulong') def testDouble(self): self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float) def testFloat(self): self.checkType(gobject.TYPE_FLOAT, 'gfloat') def testPyObject(self): self.checkType(gobject.TYPE_PYOBJECT, object) def testObject(self): self.checkType(gobject.TYPE_OBJECT) # XXX: Flags, Enums if __name__ == '__main__': unittest.main() // ... rest of the code ...
cc2e96a6030840c5221a2cce5042bedb69f8fc55
templates/openwisp2/urls.py
templates/openwisp2/urls.py
from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns admin.autodiscover() urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')), ] urlpatterns += staticfiles_urlpatterns()
from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns admin.autodiscover() admin.site.site_url = None urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')), ] urlpatterns += staticfiles_urlpatterns()
Hide "view site" link in admin
Hide "view site" link in admin
Python
bsd-3-clause
nemesisdesign/ansible-openwisp2,openwisp/ansible-openwisp2,ritwickdsouza/ansible-openwisp2
python
## Code Before: from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns admin.autodiscover() urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')), ] urlpatterns += staticfiles_urlpatterns() ## Instruction: Hide "view site" link in admin ## Code After: from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns admin.autodiscover() admin.site.site_url = None urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')), ] urlpatterns += staticfiles_urlpatterns()
... from django.contrib.staticfiles.urls import staticfiles_urlpatterns admin.autodiscover() admin.site.site_url = None urlpatterns = [ ...
5c217b60fed39ed2ac6a2e8b788df4a74fa68878
arch/ia64/kernel/irq_lsapic.c
arch/ia64/kernel/irq_lsapic.c
/* * LSAPIC Interrupt Controller * * This takes care of interrupts that are generated by the CPU's * internal Streamlined Advanced Programmable Interrupt Controller * (LSAPIC), such as the ITC and IPI interrupts. * * Copyright (C) 1999 VA Linux Systems * Copyright (C) 1999 Walt Drummond <[email protected]> * Copyright (C) 2000 Hewlett-Packard Co * Copyright (C) 2000 David Mosberger-Tang <[email protected]> */ #include <linux/sched.h> #include <linux/irq.h> static unsigned int lsapic_noop_startup (unsigned int irq) { return 0; } static void lsapic_noop (unsigned int irq) { /* nothing to do... */ } static int lsapic_retrigger(unsigned int irq) { ia64_resend_irq(irq); return 1; } struct irq_chip irq_type_ia64_lsapic = { .name = "LSAPIC", .startup = lsapic_noop_startup, .shutdown = lsapic_noop, .enable = lsapic_noop, .disable = lsapic_noop, .ack = lsapic_noop, .retrigger = lsapic_retrigger, };
/* * LSAPIC Interrupt Controller * * This takes care of interrupts that are generated by the CPU's * internal Streamlined Advanced Programmable Interrupt Controller * (LSAPIC), such as the ITC and IPI interrupts. * * Copyright (C) 1999 VA Linux Systems * Copyright (C) 1999 Walt Drummond <[email protected]> * Copyright (C) 2000 Hewlett-Packard Co * Copyright (C) 2000 David Mosberger-Tang <[email protected]> */ #include <linux/sched.h> #include <linux/irq.h> static unsigned int lsapic_noop_startup (struct irq_data *data) { return 0; } static void lsapic_noop (struct irq_data *data) { /* nothing to do... */ } static int lsapic_retrigger(struct irq_data *data) { ia64_resend_irq(data->irq); return 1; } struct irq_chip irq_type_ia64_lsapic = { .name = "LSAPIC", .irq_startup = lsapic_noop_startup, .irq_shutdown = lsapic_noop, .irq_enable = lsapic_noop, .irq_disable = lsapic_noop, .irq_ack = lsapic_noop, .irq_retrigger = lsapic_retrigger, };
Convert lsapic to new irq_chip functions
ia64: Convert lsapic to new irq_chip functions Signed-off-by: Thomas Gleixner <[email protected]>
C
mit
KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs
c
## Code Before: /* * LSAPIC Interrupt Controller * * This takes care of interrupts that are generated by the CPU's * internal Streamlined Advanced Programmable Interrupt Controller * (LSAPIC), such as the ITC and IPI interrupts. * * Copyright (C) 1999 VA Linux Systems * Copyright (C) 1999 Walt Drummond <[email protected]> * Copyright (C) 2000 Hewlett-Packard Co * Copyright (C) 2000 David Mosberger-Tang <[email protected]> */ #include <linux/sched.h> #include <linux/irq.h> static unsigned int lsapic_noop_startup (unsigned int irq) { return 0; } static void lsapic_noop (unsigned int irq) { /* nothing to do... */ } static int lsapic_retrigger(unsigned int irq) { ia64_resend_irq(irq); return 1; } struct irq_chip irq_type_ia64_lsapic = { .name = "LSAPIC", .startup = lsapic_noop_startup, .shutdown = lsapic_noop, .enable = lsapic_noop, .disable = lsapic_noop, .ack = lsapic_noop, .retrigger = lsapic_retrigger, }; ## Instruction: ia64: Convert lsapic to new irq_chip functions Signed-off-by: Thomas Gleixner <[email protected]> ## Code After: /* * LSAPIC Interrupt Controller * * This takes care of interrupts that are generated by the CPU's * internal Streamlined Advanced Programmable Interrupt Controller * (LSAPIC), such as the ITC and IPI interrupts. * * Copyright (C) 1999 VA Linux Systems * Copyright (C) 1999 Walt Drummond <[email protected]> * Copyright (C) 2000 Hewlett-Packard Co * Copyright (C) 2000 David Mosberger-Tang <[email protected]> */ #include <linux/sched.h> #include <linux/irq.h> static unsigned int lsapic_noop_startup (struct irq_data *data) { return 0; } static void lsapic_noop (struct irq_data *data) { /* nothing to do... */ } static int lsapic_retrigger(struct irq_data *data) { ia64_resend_irq(data->irq); return 1; } struct irq_chip irq_type_ia64_lsapic = { .name = "LSAPIC", .irq_startup = lsapic_noop_startup, .irq_shutdown = lsapic_noop, .irq_enable = lsapic_noop, .irq_disable = lsapic_noop, .irq_ack = lsapic_noop, .irq_retrigger = lsapic_retrigger, };
... #include <linux/irq.h> static unsigned int lsapic_noop_startup (struct irq_data *data) { return 0; } static void lsapic_noop (struct irq_data *data) { /* nothing to do... */ } static int lsapic_retrigger(struct irq_data *data) { ia64_resend_irq(data->irq); return 1; } struct irq_chip irq_type_ia64_lsapic = { .name = "LSAPIC", .irq_startup = lsapic_noop_startup, .irq_shutdown = lsapic_noop, .irq_enable = lsapic_noop, .irq_disable = lsapic_noop, .irq_ack = lsapic_noop, .irq_retrigger = lsapic_retrigger, }; ...
c65306f78f1eb97714fd2086d20ff781faf78c3a
problems/starterpackages/SteinerStarter.py
problems/starterpackages/SteinerStarter.py
import math import sys # A helper class for working with points. class Point: def __init__(self, x, y): self.x = x self.y = y class Edge: def __init__(self, p1, p2): self.p1 = p1 self.p2 = p2 def getProblem(filename): pts = [] with open(filename, 'r') as input: for line in input: l = line.split(' ') pts.append(Point(float(l[0]), float(l[1]))) return pts def outputSolutionsToFile(edges): f = open("out.txt", 'w') for a in edges: f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n') pts = [] # Where you will find the input points. edges = [] # Edges should be added to this. # Your code here. This sample code just connects the points in the order that they are given: for a in range(1, len(pts)): edges.append(Edge(pts[a-1], pts[a]))
import math import sys # A helper class for working with points. class Point: def __init__(self, x, y): self.x = x self.y = y class Edge: def __init__(self, p1, p2): self.p1 = p1 self.p2 = p2 # Gets a problem from a file as an list of points. def getProblem(filename): pts = [] with open(filename, 'r') as input: for line in input: l = line.split(' ') pts.append(Point(float(l[0]), float(l[1]))) return pts # Outputs a list of edges to file "out.txt" for submission. def outputSolutionsToFile(edges): f = open("out.txt", 'w') for a in edges: f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n') pts = getProblem("st.txt") # Where you will find the input points. edges = [] # Edges should be added to this. # Your code here. This sample code just connects the points in the order that they are given: for a in range(1, len(pts)): edges.append(Edge(pts[a-1], pts[a])) outputSolutionsToFile(edges)
Make py starterpackage more like java/c++ one
Make py starterpackage more like java/c++ one
Python
mit
HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL
python
## Code Before: import math import sys # A helper class for working with points. class Point: def __init__(self, x, y): self.x = x self.y = y class Edge: def __init__(self, p1, p2): self.p1 = p1 self.p2 = p2 def getProblem(filename): pts = [] with open(filename, 'r') as input: for line in input: l = line.split(' ') pts.append(Point(float(l[0]), float(l[1]))) return pts def outputSolutionsToFile(edges): f = open("out.txt", 'w') for a in edges: f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n') pts = [] # Where you will find the input points. edges = [] # Edges should be added to this. # Your code here. This sample code just connects the points in the order that they are given: for a in range(1, len(pts)): edges.append(Edge(pts[a-1], pts[a])) ## Instruction: Make py starterpackage more like java/c++ one ## Code After: import math import sys # A helper class for working with points. class Point: def __init__(self, x, y): self.x = x self.y = y class Edge: def __init__(self, p1, p2): self.p1 = p1 self.p2 = p2 # Gets a problem from a file as an list of points. def getProblem(filename): pts = [] with open(filename, 'r') as input: for line in input: l = line.split(' ') pts.append(Point(float(l[0]), float(l[1]))) return pts # Outputs a list of edges to file "out.txt" for submission. def outputSolutionsToFile(edges): f = open("out.txt", 'w') for a in edges: f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n') pts = getProblem("st.txt") # Where you will find the input points. edges = [] # Edges should be added to this. # Your code here. This sample code just connects the points in the order that they are given: for a in range(1, len(pts)): edges.append(Edge(pts[a-1], pts[a])) outputSolutionsToFile(edges)
... self.p1 = p1 self.p2 = p2 # Gets a problem from a file as an list of points. def getProblem(filename): pts = [] with open(filename, 'r') as input: ... pts.append(Point(float(l[0]), float(l[1]))) return pts # Outputs a list of edges to file "out.txt" for submission. def outputSolutionsToFile(edges): f = open("out.txt", 'w') for a in edges: f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n') pts = getProblem("st.txt") # Where you will find the input points. edges = [] # Edges should be added to this. # Your code here. This sample code just connects the points in the order that they are given: for a in range(1, len(pts)): edges.append(Edge(pts[a-1], pts[a])) outputSolutionsToFile(edges) ...
c2d681f0df11d2111fe1ade63a0c045f9c9ebad7
aws_profile.py
aws_profile.py
import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
Fix profile script to correctly use the credentials file
Fix profile script to correctly use the credentials file
Python
mit
mivok/tools,mivok/tools,mivok/tools,mivok/tools
python
## Code Before: import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True) ## Instruction: Fix profile script to correctly use the credentials file ## Code After: import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
// ... existing code ... import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') // ... rest of the code ...
da0dc08d8fdd18a64ecc883404553c86de6a726c
test/functional/feature_shutdown.py
test/functional/feature_shutdown.py
"""Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) Thread(target=test_long_call, args=(node,)).start() # wait 1 second to ensure event loop waits for current connections to close self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
"""Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) # Force connection establishment by executing a dummy command. node.getblockcount() Thread(target=test_long_call, args=(node,)).start() # Wait until the server is executing the above `waitfornewblock`. wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) # Wait 1 second after requesting shutdown but not before the `stop` call # finishes. This is to ensure event loop waits for current connections # to close. self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
Remove race between connecting and shutdown on separate connections
qa: Remove race between connecting and shutdown on separate connections
Python
mit
DigitalPandacoin/pandacoin,DigitalPandacoin/pandacoin,peercoin/peercoin,peercoin/peercoin,DigitalPandacoin/pandacoin,peercoin/peercoin,peercoin/peercoin,DigitalPandacoin/pandacoin,DigitalPandacoin/pandacoin,peercoin/peercoin,DigitalPandacoin/pandacoin,peercoin/peercoin
python
## Code Before: """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) Thread(target=test_long_call, args=(node,)).start() # wait 1 second to ensure event loop waits for current connections to close self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main() ## Instruction: qa: Remove race between connecting and shutdown on separate connections ## Code After: """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) # Force connection establishment by executing a dummy command. node.getblockcount() Thread(target=test_long_call, args=(node,)).start() # Wait until the server is executing the above `waitfornewblock`. wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) # Wait 1 second after requesting shutdown but not before the `stop` call # finishes. This is to ensure event loop waits for current connections # to close. self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
// ... existing code ... """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): // ... modified code ... def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) # Force connection establishment by executing a dummy command. node.getblockcount() Thread(target=test_long_call, args=(node,)).start() # Wait until the server is executing the above `waitfornewblock`. wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) # Wait 1 second after requesting shutdown but not before the `stop` call # finishes. This is to ensure event loop waits for current connections # to close. self.stop_node(0, wait=1000) if __name__ == '__main__': // ... rest of the code ...
6210a7c68844602ee390bcce61dbb637910a3c6b
include/images/SkBitmapRegionDecoder.h
include/images/SkBitmapRegionDecoder.h
class SkBitmapRegionDecoder { public: SkBitmapRegionDecoder(SkImageDecoder *decoder, int width, int height) { fDecoder = decoder; fWidth = width; fHeight = height; } virtual ~SkBitmapRegionDecoder() { delete fDecoder; } virtual bool decodeRegion(SkBitmap* bitmap, SkIRect rect, SkBitmap::Config pref, int sampleSize); virtual int getWidth() { return fWidth; } virtual int getHeight() { return fHeight; } virtual SkImageDecoder* getDecoder() { return fDecoder; } private: SkImageDecoder *fDecoder; int fWidth; int fHeight; }; #endif
class SkBitmapRegionDecoder { public: SkBitmapRegionDecoder(SkImageDecoder *decoder, SkStream *stream, int width, int height) { fDecoder = decoder; fStream = stream; fWidth = width; fHeight = height; } virtual ~SkBitmapRegionDecoder() { delete fDecoder; fStream->unref(); } virtual bool decodeRegion(SkBitmap* bitmap, SkIRect rect, SkBitmap::Config pref, int sampleSize); virtual int getWidth() { return fWidth; } virtual int getHeight() { return fHeight; } virtual SkImageDecoder* getDecoder() { return fDecoder; } private: SkImageDecoder *fDecoder; SkStream *fStream; int fWidth; int fHeight; }; #endif
Fix 3510563: memory leak in BitmapRegionDecoder.
Fix 3510563: memory leak in BitmapRegionDecoder. Change-Id: I30b3a3806f4484d95602539def1a77a366560fdf
C
bsd-3-clause
IllusionRom-deprecated/android_platform_external_skia,AOSPA-L/android_external_skia,VRToxin-AOSP/android_external_skia,MinimalOS/external_skia,VentureROM-L/android_external_skia,MarshedOut/android_external_skia,mozilla-b2g/external_skia,RockchipOpensourceCommunity/external_skia,Infusion-OS/android_external_skia,Plain-Andy/android_platform_external_skia,byterom/android_external_skia,MinimalOS/android_external_skia,MonkeyZZZZ/platform_external_skia,Infusion-OS/android_external_skia,PurityPlus/android_external_skia,omapzoom/platform-external-skia,upndwn4par/android_external_skia,Hybrid-Rom/external_skia,sudosurootdev/external_skia,SlimSaber/android_external_skia,suyouxin/android_external_skia,VanirAOSP/external_skia,byterom/android_external_skia,AOSPB/external_skia,Gateworks/skia,AOSP-YU/platform_external_skia,Plain-Andy/android_platform_external_skia,FusionSP/android_external_skia,AOSP-YU/platform_external_skia,CNA/android_external_skia,RadonX-ROM/external_skia,w3nd1go/android_external_skia,MinimalOS-AOSP/platform_external_skia,geekboxzone/lollipop_external_skia,TeslaProject/external_skia,pacerom/external_skia,TeamTwisted/external_skia,PurityROM/platform_external_skia,roalex/android_external_skia,MinimalOS-AOSP/platform_external_skia,MinimalOS/external_skia,brothaedhung/external_skia,android-ia/platform_external_skia,brothaedhung/external_skia,Plain-Andy/android_platform_external_skia,wildermason/external_skia,freerunner/platform_external_skia,GladeRom/android_external_skia,StelixROM/android_external_skia,OptiPop/external_skia,Euphoria-OS-Legacy/android_external_skia,houst0nn/external_skia,ench0/external_skia,TeamJB/linaro_external_skia,Mahdi-Rom/android_external_skia,AOSPB/external_skia,TeamJB/linaro_external_skia,OptiPop/external_skia,VentureROM-L/android_external_skia,RadonX-ROM/external_skia,Root-Box/external_skia,AOKP/external_skia,PurityROM/platform_external_skia,UnicornButter/external_skia,Fusion-Rom/android_external_skia,OptiPop/external_skia,OneRom/external_skia,InfinitiveOS/external_skia,TeamTwisted/external_skia,houst0nn/external_skia,DesolationStaging/android_external_skia,xzzz9097/android_external_skia,sombree/android_external_skia,mrgatesjunior/external_skia,aosp-mirror/platform_external_skia,Nico60/external_skia,Tesla-Redux/android_external_skia,RadonX-ROM/external_skia,NamelessRom/android_external_skia,fire855/android_external_skia,PAC-ROM/android_external_skia,Android4SAM/platform_external_skia,mydongistiny/android_external_skia,parmv6/external_skia,sigysmund/platform_external_skia,invisiblek/android_external_skia,Plain-Andy/android_platform_external_skia,AsteroidOS/android_external_skia,C-RoM-KK/android_external_skia,VRToxin-AOSP/android_external_skia,zhaochengw/platform_external_skia,xzzz9097/android_external_skia,SuperNexus/android_external_skia,ench0/external_skia,AsteroidOS/android_external_skia,TeamTwisted/external_skia,yinquan529/platform-external-skia,aospo/platform_external_skia,MinimalOS/external_skia,Fusion-Rom/android_external_skia,codeaurora-unoffical/platform-external-skia,StelixROM/android_external_skia,AOSPA-L/android_external_skia,DesolationStaging/android_external_skia,fire855/android_external_skia,BrokenROM/external_skia,androidarmv6/android_external_skia,omapzoom/platform-external-skia,aways-CR/android_external_skia,byterom/android_external_skia,codewalkerster/android_external_skia,geekboxzone/lollipop_external_skia,RadonX-ROM/external_skia,aospo/platform_external_skia,Euphoria-OS-Legacy/android_external_skia,yinquan529/platform-external-skia,AOSPB/externa
l_skia,FusionSP/android_external_skia,TeamExodus/external_skia,PAC-ROM/android_external_skia,ChameleonOS/android_external_skia,MarshedOut/android_external_skia,InfinitiveOS/external_skia,VanirAOSP/external_skia,GladeRom/android_external_skia,Hybrid-Rom/external_skia,aospo/platform_external_skia,X-ROM/android_external_skia,RadonX-ROM/external_skia,Khaon/android_external_skia,PurityROM/platform_external_skia,Khaon/android_external_skia,Mahdi-Rom/android_external_skia,VentureROM-L/android_external_skia,Purity-Lollipop/platform_external_skia,byterom/android_external_skia,FusionSP/android_external_skia,Omegaphora/external_skia,wildermason/external_skia,yinquan529/platform-external-skia,Purity-Lollipop/platform_external_skia,AOKPSaber/android_external_skia,AOSP-YU/platform_external_skia,xzzz9097/android_external_skia,boulzordev/android_external_skia,sombree/android_external_skia,TripNRaVeR/android_external_skia,cubox-i/android_external_skia,AndroidOpenDevelopment/android_external_skia,ctiao/platform-external-skia,DesolationStaging/android_external_skia,cubox-i/android_external_skia,ParanoidAndroid/android_external_skia,timduru/platform-external-skia,boulzordev/android_external_skia,mozilla-b2g/external_skia,Purity-Lollipop/platform_external_skia,upndwn4par/android_external_skia,ModdedPA/android_external_skia,AndroidOpenDevelopment/android_external_skia,opensourcechipspark/platform_external_skia,androidarmv6/android_external_skia,SuperNexus/android_external_skia,MinimalOS-AOSP/platform_external_skia,sudosurootdev/external_skia,freerunner/platform_external_skia,mozilla-b2g/external_skia,androidarmv6/android_external_skia,TeamEOS/external_skia,SuperNexus/android_external_skia,AndroidOpenDevelopment/android_external_skia,TeamTwisted/external_skia,invisiblek/android_external_skia,OneRom/external_skia,aospo/platform_external_skia,DesolationStaging/android_external_skia,temasek/android_external_skia,houst0nn/external_skia,HealthyHoney/temasek_SKIA,StelixROM/android_external_skia,Euphoria-OS-Legacy/android_external_skia,MonkeyZZZZ/platform_external_skia,sudosurootdev/external_skia,Asteroid-Project/android_external_skia,android-ia/platform_external_skia,MinimalOS/android_external_skia,xhteam/external-skia,MarshedOut/android_external_skia,ParanoidAndroid/android_external_skia,AOSPB/external_skia,MagicDevTeam/android_external_skia,MinimalOS-AOSP/platform_external_skia,aosp-mirror/platform_external_skia,HealthyHoney/temasek_SKIA,BrokenROM/external_skia,Pure-Aosp/android_external_skia,nfxosp/platform_external_skia,shashlik/android-skia,ctiao/platform-external-skia,invisiblek/android_external_skia,StelixROM/android_external_skia,SlimSaber/android_external_skia,OneRom/external_skia,DesolationStaging/android_external_skia,sombree/android_external_skia,invisiblek/android_external_skia,mozilla-b2g/external_skia,TeamEOS/external_skia,AOSPA-L/android_external_skia,TeamTwisted/external_skia,OneRom/external_skia,InfinitiveOS/external_skia,Pure-Aosp/android_external_skia,CNA/android_external_skia,AsteroidOS/android_external_skia,xzzz9097/android_external_skia,androidarmv6/android_external_skia,sudosurootdev/external_skia,AndroidOpenDevelopment/android_external_skia,InfinitiveOS/external_skia,TeamExodus/external_skia,MinimalOS-AOSP/platform_external_skia,codeaurora-unoffical/platform-external-skia,AsteroidOS/android_external_skia,AOSP-YU/platform_external_skia,temasek/android_external_skia,nfxosp/platform_external_skia,Nico60/external_skia,CandyKat/external_skia,freerunner/platform_external_skia,F-AOSP/platform_external
_skia,geekboxzone/mmallow_external_skia,Purity-Lollipop/platform_external_skia,xhteam/external-skia,parmv6/external_skia,wildermason/external_skia,Hybrid-Rom/external_skia,F-AOSP/platform_external_skia,pacerom/external_skia,OneRom/external_skia,Android-AOSP/external_skia,temasek/android_external_skia,PAC-ROM/android_external_skia,IllusionRom-deprecated/android_platform_external_skia,TripNRaVeR/android_external_skia,temasek/android_external_skia,nfxosp/platform_external_skia,UnicornButter/external_skia,ChameleonOS/android_external_skia,Hikari-no-Tenshi/android_external_skia,TeslaProject/external_skia,Fusion-Rom/android_external_skia,OneRom/external_skia,aways-CR/android_external_skia,aosp-mirror/platform_external_skia,yinquan529/platform-external-skia,aosp-mirror/platform_external_skia,Infinitive-OS/platform_external_skia,nfxosp/platform_external_skia,TripNRaVeR/android_external_skia,sudosurootdev/external_skia,TeamExodus/external_skia,PurityROM/platform_external_skia,MinimalOS/external_skia,fire855/android_external_skia,OptiPop/external_skia,timduru/platform-external-skia,pacerom/external_skia,IllusionRom-deprecated/android_platform_external_skia,codewalkerster/android_external_skia,VRToxin-AOSP/android_external_skia,MarshedOut/android_external_skia,RockchipOpensourceCommunity/external_skia,BrokenROM/external_skia,android-ia/platform_external_skia,Hybrid-Rom/external_skia,MonkeyZZZZ/platform_external_skia,roalex/android_external_skia,w3nd1go/android_external_skia,zhaochengw/platform_external_skia,opensourcechipspark/platform_external_skia,TeslaOS/android_external_skia,Nico60/external_skia,bleeding-rom/android_external_skia,geekboxzone/lollipop_external_skia,Hikari-no-Tenshi/android_external_skia,Mahdi-Rom/android_external_skia,CandyKat/external_skia,sigysmund/platform_external_skia,houst0nn/external_skia,NamelessRom/android_external_skia,shashlik/android-skia,YUPlayGodDev/platform_external_skia,codeaurora-unoffical/platform-external-skia,C-RoM-KK/android_external_skia,sigysmund/platform_external_skia,roalex/android_external_skia,F-AOSP/platform_external_skia,fire855/android_external_skia,PurityPlus/android_external_skia,TeslaProject/external_skia,MonkeyZZZZ/platform_external_skia,mydongistiny/android_external_skia,TeamJB/linaro_external_skia,Asteroid-Project/android_external_skia,Tesla-Redux/android_external_skia,YUPlayGodDev/platform_external_skia,MonkeyZZZZ/platform_external_skia,AOSP-YU/platform_external_skia,embest-tech/android_external_skia,UBERMALLOW/external_skia,TeamTwisted/external_skia,MinimalOS/android_external_skia,sigysmund/platform_external_skia,DesolationStaging/android_external_skia,invisiblek/android_external_skia,geekboxzone/mmallow_external_skia,UBERMALLOW/external_skia,C-RoM-KK/android_external_skia,houst0nn/external_skia,timduru/platform-external-skia,omapzoom/platform-external-skia,Omegaphora/external_skia,cubox-i/android_external_skia,TeslaOS/android_external_skia,TeslaProject/external_skia,cuboxi/android_external_skia,MagicDevTeam/android_external_skia,AOSPA-L/android_external_skia,AOSPB/external_skia,Purity-Lollipop/platform_external_skia,PAC-ROM/android_external_skia,Pure-Aosp/android_external_skia,roalex/android_external_skia,AOSPA-L/android_external_skia,Purity-Lollipop/platform_external_skia,AOKP/external_skia,SlimSaber/android_external_skia,TeslaProject/external_skia,Root-Box/external_skia,InfinitiveOS/external_skia,thiz11/platform_external_skia,SaleJumper/android-source-browsing.platform--external--skia,MinimalOS-AOSP/platform_external_skia,GladeRom/android_ex
ternal_skia,AOSPA-L/android_external_skia,PurityPlus/android_external_skia,UltimatumKang/android_external_skia-1,Asteroid-Project/android_external_skia,Fusion-Rom/android_external_skia,temasek/android_external_skia,sudosurootdev/external_skia,sombree/android_external_skia,androidarmv6/android_external_skia,UBERMALLOW/external_skia,DesolationStaging/android_external_skia,geekboxzone/lollipop_external_skia,MarshedOut/android_external_skia,sigysmund/platform_external_skia,BrokenROM/external_skia,Omegaphora/external_skia,Asteroid-Project/android_external_skia,MinimalOS/android_external_skia,Root-Box/external_skia,w3nd1go/android_external_skia,shashlik/android-skia,AOSPA-L/android_external_skia,codeaurora-unoffical/platform-external-skia,sigysmund/platform_external_skia,omapzoom/platform-external-skia,wildermason/external_skia,ench0/external_skia,yinquan529/platform-external-skia,NamelessRom/android_external_skia,Android4SAM/platform_external_skia,temasek/android_external_skia,AOKP/external_skia,marlontoe/android_external_skia,codeaurora-unoffical/platform-external-skia,androidarmv6/android_external_skia,MarshedOut/android_external_skia,AOSPB/external_skia,geekboxzone/mmallow_external_skia,Tesla-Redux/android_external_skia,xzzz9097/android_external_skia,Omegaphora/external_skia,InsomniaAOSP/platform_external_skia,upndwn4par/android_external_skia,AOSP-S4-KK/platform_external_skia,androidarmv6/android_external_skia,Nico60/external_skia,UBERMALLOW/external_skia,TeamExodus/external_skia,FusionSP/android_external_skia,VanirAOSP/external_skia,SlimSaber/android_external_skia,fire855/android_external_skia,Purity-Lollipop/platform_external_skia,upndwn4par/android_external_skia,mrgatesjunior/external_skia,MagicDevTeam/android_external_skia,SlimSaber/android_external_skia,aosp-mirror/platform_external_skia,FusionSP/android_external_skia,VanirAOSP/external_skia,Plain-Andy/android_platform_external_skia,RadonX-ROM/external_skia,CandyKat/external_skia,Omegaphora/external_skia,parmv6/external_skia,BrokenROM/external_skia,zhaochengw/platform_external_skia,MarshedOut/android_external_skia,aosp-mirror/platform_external_skia,Hikari-no-Tenshi/android_external_skia,geekboxzone/lollipop_external_skia,CNA/android_external_skia,pacerom/external_skia,InfinitiveOS/external_skia,boulzordev/android_external_skia,HealthyHoney/temasek_SKIA,SuperNexus/android_external_skia,MinimalOS/external_skia,TeamEOS/external_skia,bleeding-rom/android_external_skia,TeamExodus/external_skia,Fusion-Rom/android_external_skia,opensourcechipspark/platform_external_skia,X-ROM/android_external_skia,AndroidOpenDevelopment/android_external_skia,Root-Box/external_skia,HealthyHoney/temasek_SKIA,marlontoe/android_external_skia,MyAOSP/external_skia,Plain-Andy/android_platform_external_skia,temasek/android_external_skia,TeslaOS/android_external_skia,Euphoria-OS-Legacy/android_external_skia,MyAOSP/external_skia,android-ia/platform_external_skia,codewalkerster/android_external_skia,zhaochengw/platform_external_skia,sombree/android_external_skia,mydongistiny/android_external_skia,TeamTwisted/external_skia,DesolationStaging/android_external_skia,wildermason/external_skia,timduru/platform-external-skia,MinimalOS/android_external_skia,aosp-mirror/platform_external_skia,SlimSaber/android_external_skia,mydongistiny/android_external_skia,PAC-ROM/android_external_skia,suyouxin/android_external_skia,sombree/android_external_skia,YUPlayGodDev/platform_external_skia,invisiblek/android_external_skia,wildermason/external_skia,GladeRom/android_external_skia,AOSPA-L/an
droid_external_skia,shashlik/android-skia,Infusion-OS/android_external_skia,mrgatesjunior/external_skia,Android-AOSP/external_skia,TeamExodus/external_skia,MinimalOS-AOSP/platform_external_skia,RadonX-ROM/external_skia,bleeding-rom/android_external_skia,Khaon/android_external_skia,BrokenROM/external_skia,LOSP/external_skia,boulzordev/android_external_skia,sudosurootdev/external_skia,cuboxi/android_external_skia,zhaochengw/platform_external_skia,Omegaphora/external_skia,AOSPB/external_skia,aospo/platform_external_skia,BrokenROM/external_skia,thiz11/platform_external_skia,HealthyHoney/temasek_SKIA,fire855/android_external_skia,UltimatumKang/android_external_skia-1,Infusion-OS/android_external_skia,MagicDevTeam/android_external_skia,MonkeyZZZZ/platform_external_skia,spezi77/android_external_skia,android-ia/platform_external_skia,TeamNyx/external_skia,aways-CR/android_external_skia,CandyKat/external_skia,byterom/android_external_skia,w3nd1go/android_external_skia,MonkeyZZZZ/platform_external_skia,RockchipOpensourceCommunity/external_skia,MinimalOS/external_skia,Infusion-OS/android_external_skia,TeslaOS/android_external_skia,nfxosp/platform_external_skia,suyouxin/android_external_skia,codeaurora-unoffical/platform-external-skia,TeamEOS/external_skia,Khaon/android_external_skia,android-ia/platform_external_skia,mozilla-b2g/external_skia,F-AOSP/platform_external_skia,pacerom/external_skia,HealthyHoney/temasek_SKIA,brothaedhung/external_skia,Hybrid-Rom/external_skia,parmv6/external_skia,Asteroid-Project/android_external_skia,AsteroidOS/android_external_skia,VRToxin-AOSP/android_external_skia,Tesla-Redux/android_external_skia,TeamJB/linaro_external_skia,ench0/external_skia,C-RoM-KK/android_external_skia,embest-tech/android_external_skia,MonkeyZZZZ/platform_external_skia,ench0/external_skia,geekboxzone/lollipop_external_skia,bleeding-rom/android_external_skia,TripNRaVeR/android_external_skia,cuboxi/android_external_skia,MinimalOS/android_external_skia,F-AOSP/platform_external_skia,mydongistiny/android_external_skia,Android4SAM/platform_external_skia,YUPlayGodDev/platform_external_skia,LOSP/external_skia,AOSPB/external_skia,GladeRom/android_external_skia,InsomniaAOSP/platform_external_skia,mozilla-b2g/external_skia,AOSP-S4-KK/platform_external_skia,thiz11/platform_external_skia,omapzoom/platform-external-skia,Hikari-no-Tenshi/android_external_skia,mydongistiny/android_external_skia,TeamNyx/external_skia,mozilla-b2g/external_skia,AOSP-S4-KK/platform_external_skia,timduru/platform-external-skia,TeslaOS/android_external_skia,xhteam/external-skia,androidarmv6/android_external_skia,shashlik/android-skia,AsteroidOS/android_external_skia,w3nd1go/android_external_skia,SaleJumper/android-source-browsing.platform--external--skia,boulzordev/android_external_skia,marlontoe/android_external_skia,StelixROM/android_external_skia,byterom/android_external_skia,TeslaProject/external_skia,TeamExodus/external_skia,Infinitive-OS/platform_external_skia,VRToxin-AOSP/android_external_skia,fire855/android_external_skia,VanirAOSP/external_skia,xzzz9097/android_external_skia,SlimSaber/android_external_skia,VRToxin-AOSP/android_external_skia,AOSP-YU/platform_external_skia,boulzordev/android_external_skia,ctiao/platform-external-skia,OneRom/external_skia,cuboxi/android_external_skia,TeslaProject/external_skia,UltimatumKang/android_external_skia-1,TeamExodus/external_skia,aways-CR/android_external_skia,sigysmund/platform_external_skia,Euphoria-OS-Legacy/android_external_skia,GladeRom/android_external_skia,TeamBliss-LP/android_exte
rnal_skia,StelixROM/android_external_skia,UBERMALLOW/external_skia,F-AOSP/platform_external_skia,ctiao/platform-external-skia,suyouxin/android_external_skia,boulzordev/android_external_skia,TeamBliss-LP/android_external_skia,MyAOSP/external_skia,ctiao/platform-external-skia,MinimalOS-AOSP/platform_external_skia,sombree/android_external_skia,aospo/platform_external_skia,BrokenROM/external_skia,YUPlayGodDev/platform_external_skia,invisiblek/android_external_skia,MyAOSP/external_skia,Infusion-OS/android_external_skia,aosp-mirror/platform_external_skia,houst0nn/external_skia,ench0/external_skia,TeamJB/linaro_external_skia,wildermason/external_skia,Gateworks/skia,aospo/platform_external_skia,mrgatesjunior/external_skia,RockchipOpensourceCommunity/external_skia,Omegaphora/external_skia,PAC-ROM/android_external_skia,UBERMALLOW/external_skia,Hybrid-Rom/external_skia,ParanoidAndroid/android_external_skia,w3nd1go/android_external_skia,InsomniaROM/platform_external_skia,AndroidOpenDevelopment/android_external_skia,Khaon/android_external_skia,mydongistiny/android_external_skia,OptiPop/external_skia,fire855/android_external_skia,geekboxzone/mmallow_external_skia,Pure-Aosp/android_external_skia,MarshedOut/android_external_skia,F-AOSP/platform_external_skia,TeslaOS/android_external_skia,OptiPop/external_skia,geekboxzone/mmallow_external_skia,NamelessRom/android_external_skia,ench0/external_skia,MarshedOut/android_external_skia,InfinitiveOS/external_skia,TeamEOS/external_skia,Euphoria-OS-Legacy/android_external_skia,pacerom/external_skia,Android-AOSP/external_skia,byterom/android_external_skia,RadonX-ROM/external_skia,nfxosp/platform_external_skia,mydongistiny/android_external_skia,wildermason/external_skia,TeamTwisted/external_skia,geekboxzone/lollipop_external_skia,timduru/platform-external-skia,Infinitive-OS/platform_external_skia,TeamBliss-LP/android_external_skia,GladeRom/android_external_skia,ChameleonOS/android_external_skia,Infinitive-OS/platform_external_skia,suyouxin/android_external_skia,Infinitive-OS/platform_external_skia,Pure-Aosp/android_external_skia,w3nd1go/android_external_skia,AOSP-YU/platform_external_skia,LOSP/external_skia,TeamBliss-LP/android_external_skia,spezi77/android_external_skia,Mahdi-Rom/android_external_skia,Nico60/external_skia,TeamBliss-LP/android_external_skia,PAC-ROM/android_external_skia,F-AOSP/platform_external_skia,TeamEOS/external_skia,suyouxin/android_external_skia,codeaurora-unoffical/platform-external-skia,TeamBliss-LP/android_external_skia,xhteam/external-skia,MinimalOS/android_external_skia,codewalkerster/android_external_skia,UnicornButter/external_skia,nfxosp/platform_external_skia,AOSPB/external_skia,SaleJumper/android-source-browsing.platform--external--skia,zhaochengw/platform_external_skia,w3nd1go/android_external_skia,freerunner/platform_external_skia,Android-AOSP/external_skia,nfxosp/platform_external_skia,UltimatumKang/android_external_skia-1,Android-AOSP/external_skia,Pure-Aosp/android_external_skia,AsteroidOS/android_external_skia,SlimSaber/android_external_skia,Infinitive-OS/platform_external_skia,TeamBliss-LP/android_external_skia,geekboxzone/mmallow_external_skia,Tesla-Redux/android_external_skia,VRToxin-AOSP/android_external_skia,AOKPSaber/android_external_skia,VentureROM-L/android_external_skia,Android-AOSP/external_skia,C-RoM-KK/android_external_skia,brothaedhung/external_skia,codeaurora-unoffical/platform-external-skia,Hybrid-Rom/external_skia,Pure-Aosp/android_external_skia,CandyKat/external_skia,YUPlayGodDev/platform_external_skia,zhaochengw/
platform_external_skia,FusionSP/android_external_skia,SaleJumper/android-source-browsing.platform--external--skia,Plain-Andy/android_platform_external_skia,PAC-ROM/android_external_skia,NamelessRom/android_external_skia,AOSP-S4-KK/platform_external_skia,MonkeyZZZZ/platform_external_skia,android-ia/platform_external_skia,geekboxzone/lollipop_external_skia,Omegaphora/external_skia,spezi77/android_external_skia,NamelessRom/android_external_skia,mozilla-b2g/external_skia,Infinitive-OS/platform_external_skia,cubox-i/android_external_skia,MinimalOS/android_external_skia,TeslaProject/external_skia,FusionSP/android_external_skia,nfxosp/platform_external_skia,Khaon/android_external_skia,marlontoe/android_external_skia,geekboxzone/mmallow_external_skia,Fusion-Rom/android_external_skia,thiz11/platform_external_skia,PurityPlus/android_external_skia,Asteroid-Project/android_external_skia,TeslaOS/android_external_skia,byterom/android_external_skia,PurityPlus/android_external_skia,NamelessRom/android_external_skia,LOSP/external_skia,ChameleonOS/android_external_skia,xzzz9097/android_external_skia,embest-tech/android_external_skia,Khaon/android_external_skia,xzzz9097/android_external_skia,OneRom/external_skia,geekboxzone/mmallow_external_skia,VentureROM-L/android_external_skia,zhaochengw/platform_external_skia,HealthyHoney/temasek_SKIA,Mahdi-Rom/android_external_skia,opensourcechipspark/platform_external_skia,Infinitive-OS/platform_external_skia,YUPlayGodDev/platform_external_skia,aospo/platform_external_skia,CNA/android_external_skia,opensourcechipspark/platform_external_skia,InsomniaROM/platform_external_skia,Khaon/android_external_skia,OptiPop/external_skia,Infusion-OS/android_external_skia,VRToxin-AOSP/android_external_skia,shashlik/android-skia,AsteroidOS/android_external_skia,Asteroid-Project/android_external_skia,TeamEOS/external_skia,LOSP/external_skia,AOSP-YU/platform_external_skia,sudosurootdev/external_skia,AOSP-S4-KK/platform_external_skia,pacerom/external_skia,X-ROM/android_external_skia,Euphoria-OS-Legacy/android_external_skia,MinimalOS-AOSP/platform_external_skia,Asteroid-Project/android_external_skia,ctiao/platform-external-skia,aosp-mirror/platform_external_skia,HealthyHoney/temasek_SKIA,cubox-i/android_external_skia,Purity-Lollipop/platform_external_skia,marlontoe/android_external_skia,AOKPSaber/android_external_skia,TeamTwisted/external_skia,VentureROM-L/android_external_skia,Hikari-no-Tenshi/android_external_skia,Pure-Aosp/android_external_skia,InfinitiveOS/external_skia,UBERMALLOW/external_skia,VentureROM-L/android_external_skia,AOSP-YU/platform_external_skia,Hybrid-Rom/external_skia,houst0nn/external_skia,NamelessRom/android_external_skia,UBERMALLOW/external_skia,Gateworks/skia,GladeRom/android_external_skia,AOKP/external_skia,timduru/platform-external-skia,invisiblek/android_external_skia,suyouxin/android_external_skia,Infinitive-OS/platform_external_skia,Android-AOSP/external_skia,TeslaOS/android_external_skia,ParanoidAndroid/android_external_skia,ModdedPA/android_external_skia,sigysmund/platform_external_skia,Hikari-no-Tenshi/android_external_skia,spezi77/android_external_skia,spezi77/android_external_skia,IllusionRom-deprecated/android_platform_external_skia,InsomniaAOSP/platform_external_skia,boulzordev/android_external_skia,android-ia/platform_external_skia,ModdedPA/android_external_skia,UnicornButter/external_skia,aosp-mirror/platform_external_skia,temasek/android_external_skia,ench0/external_skia,Hikari-no-Tenshi/android_external_skia,w3nd1go/android_external_skia,Fusion-Rom/a
ndroid_external_skia,sombree/android_external_skia,embest-tech/android_external_skia,cuboxi/android_external_skia,spezi77/android_external_skia,MinimalOS/external_skia,Tesla-Redux/android_external_skia,geekboxzone/mmallow_external_skia,VRToxin-AOSP/android_external_skia,X-ROM/android_external_skia,yinquan529/platform-external-skia,InsomniaROM/platform_external_skia,Gateworks/skia,InsomniaAOSP/platform_external_skia,Infusion-OS/android_external_skia,upndwn4par/android_external_skia,OneRom/external_skia,ctiao/platform-external-skia,AOKPSaber/android_external_skia,TeamExodus/external_skia,ModdedPA/android_external_skia,UBERMALLOW/external_skia,MinimalOS/external_skia,PAC-ROM/android_external_skia,Android4SAM/platform_external_skia,Tesla-Redux/android_external_skia,Hikari-no-Tenshi/android_external_skia,VentureROM-L/android_external_skia,YUPlayGodDev/platform_external_skia,InsomniaROM/platform_external_skia,Fusion-Rom/android_external_skia,Tesla-Redux/android_external_skia,YUPlayGodDev/platform_external_skia,MyAOSP/external_skia,OptiPop/external_skia,boulzordev/android_external_skia,AndroidOpenDevelopment/android_external_skia,Euphoria-OS-Legacy/android_external_skia
c
## Code Before: class SkBitmapRegionDecoder { public: SkBitmapRegionDecoder(SkImageDecoder *decoder, int width, int height) { fDecoder = decoder; fWidth = width; fHeight = height; } virtual ~SkBitmapRegionDecoder() { delete fDecoder; } virtual bool decodeRegion(SkBitmap* bitmap, SkIRect rect, SkBitmap::Config pref, int sampleSize); virtual int getWidth() { return fWidth; } virtual int getHeight() { return fHeight; } virtual SkImageDecoder* getDecoder() { return fDecoder; } private: SkImageDecoder *fDecoder; int fWidth; int fHeight; }; #endif ## Instruction: Fix 3510563: memory leak in BitmapRegionDecoder. Change-Id: I30b3a3806f4484d95602539def1a77a366560fdf ## Code After: class SkBitmapRegionDecoder { public: SkBitmapRegionDecoder(SkImageDecoder *decoder, SkStream *stream, int width, int height) { fDecoder = decoder; fStream = stream; fWidth = width; fHeight = height; } virtual ~SkBitmapRegionDecoder() { delete fDecoder; fStream->unref(); } virtual bool decodeRegion(SkBitmap* bitmap, SkIRect rect, SkBitmap::Config pref, int sampleSize); virtual int getWidth() { return fWidth; } virtual int getHeight() { return fHeight; } virtual SkImageDecoder* getDecoder() { return fDecoder; } private: SkImageDecoder *fDecoder; SkStream *fStream; int fWidth; int fHeight; }; #endif
// ... existing code ... class SkBitmapRegionDecoder { public: SkBitmapRegionDecoder(SkImageDecoder *decoder, SkStream *stream, int width, int height) { fDecoder = decoder; fStream = stream; fWidth = width; fHeight = height; } virtual ~SkBitmapRegionDecoder() { delete fDecoder; fStream->unref(); } virtual bool decodeRegion(SkBitmap* bitmap, SkIRect rect, // ... modified code ... private: SkImageDecoder *fDecoder; SkStream *fStream; int fWidth; int fHeight; }; // ... rest of the code ...
3f223b1ba079882d16303595a215457d08a5a3b6
src/main/java/org/kohsuke/github/GHIssueBuilder.java
src/main/java/org/kohsuke/github/GHIssueBuilder.java
package org.kohsuke.github; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * @author Kohsuke Kawaguchi */ public class GHIssueBuilder { private final GHRepository repo; private final Requester builder; private List<String> labels = new ArrayList<String>(); GHIssueBuilder(GHRepository repo, String title) { this.repo = repo; this.builder = new Requester(repo.root); builder.with("title",title); } /** * Sets the main text of an issue, which is arbitrary multi-line text. */ public GHIssueBuilder body(String str) { builder.with("body",str); return this; } public GHIssueBuilder assignee(GHUser user) { if (user!=null) builder.with("assignee",user.getLogin()); return this; } public GHIssueBuilder assignee(String user) { if (user!=null) builder.with("assignee",user); return this; } public GHIssueBuilder milestone(GHMilestone milestone) { if (milestone!=null) builder.with("milestone",milestone.getNumber()); return this; } public GHIssueBuilder label(String label) { if (label!=null) labels.add(label); return this; } /** * Creates a new issue. */ public GHIssue create() throws IOException { return builder.with("labels",labels).to(repo.getApiTailUrl("issues"),GHIssue.class).wrap(repo); } }
package org.kohsuke.github; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * @author Kohsuke Kawaguchi */ public class GHIssueBuilder { private final GHRepository repo; private final Requester builder; private List<String> labels = new ArrayList<String>(); private List<String> assignees = new ArrayList<String>(); GHIssueBuilder(GHRepository repo, String title) { this.repo = repo; this.builder = new Requester(repo.root); builder.with("title",title); } /** * Sets the main text of an issue, which is arbitrary multi-line text. */ public GHIssueBuilder body(String str) { builder.with("body",str); return this; } public GHIssueBuilder assignee(GHUser user) { if (user!=null) assignees.add(user.getLogin()); return this; } public GHIssueBuilder assignee(String user) { if (user!=null) assignees.add(user); return this; } public GHIssueBuilder milestone(GHMilestone milestone) { if (milestone!=null) builder.with("milestone",milestone.getNumber()); return this; } public GHIssueBuilder label(String label) { if (label!=null) labels.add(label); return this; } /** * Creates a new issue. */ public GHIssue create() throws IOException { return builder.with("labels",labels).with("assignees",assignees).to(repo.getApiTailUrl("issues"),GHIssue.class).wrap(repo); } }
Support assignees when creating a new issue
Support assignees when creating a new issue
Java
mit
recena/github-api,Shredder121/github-api,jeffnelson/github-api,kohsuke/github-api,stephenc/github-api
java
## Code Before: package org.kohsuke.github; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * @author Kohsuke Kawaguchi */ public class GHIssueBuilder { private final GHRepository repo; private final Requester builder; private List<String> labels = new ArrayList<String>(); GHIssueBuilder(GHRepository repo, String title) { this.repo = repo; this.builder = new Requester(repo.root); builder.with("title",title); } /** * Sets the main text of an issue, which is arbitrary multi-line text. */ public GHIssueBuilder body(String str) { builder.with("body",str); return this; } public GHIssueBuilder assignee(GHUser user) { if (user!=null) builder.with("assignee",user.getLogin()); return this; } public GHIssueBuilder assignee(String user) { if (user!=null) builder.with("assignee",user); return this; } public GHIssueBuilder milestone(GHMilestone milestone) { if (milestone!=null) builder.with("milestone",milestone.getNumber()); return this; } public GHIssueBuilder label(String label) { if (label!=null) labels.add(label); return this; } /** * Creates a new issue. */ public GHIssue create() throws IOException { return builder.with("labels",labels).to(repo.getApiTailUrl("issues"),GHIssue.class).wrap(repo); } } ## Instruction: Support assignees when creating a new issue ## Code After: package org.kohsuke.github; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * @author Kohsuke Kawaguchi */ public class GHIssueBuilder { private final GHRepository repo; private final Requester builder; private List<String> labels = new ArrayList<String>(); private List<String> assignees = new ArrayList<String>(); GHIssueBuilder(GHRepository repo, String title) { this.repo = repo; this.builder = new Requester(repo.root); builder.with("title",title); } /** * Sets the main text of an issue, which is arbitrary multi-line text. */ public GHIssueBuilder body(String str) { builder.with("body",str); return this; } public GHIssueBuilder assignee(GHUser user) { if (user!=null) assignees.add(user.getLogin()); return this; } public GHIssueBuilder assignee(String user) { if (user!=null) assignees.add(user); return this; } public GHIssueBuilder milestone(GHMilestone milestone) { if (milestone!=null) builder.with("milestone",milestone.getNumber()); return this; } public GHIssueBuilder label(String label) { if (label!=null) labels.add(label); return this; } /** * Creates a new issue. */ public GHIssue create() throws IOException { return builder.with("labels",labels).with("assignees",assignees).to(repo.getApiTailUrl("issues"),GHIssue.class).wrap(repo); } }
// ... existing code ... private final GHRepository repo; private final Requester builder; private List<String> labels = new ArrayList<String>(); private List<String> assignees = new ArrayList<String>(); GHIssueBuilder(GHRepository repo, String title) { this.repo = repo; // ... modified code ... public GHIssueBuilder assignee(GHUser user) { if (user!=null) assignees.add(user.getLogin()); return this; } public GHIssueBuilder assignee(String user) { if (user!=null) assignees.add(user); return this; } ... * Creates a new issue. */ public GHIssue create() throws IOException { return builder.with("labels",labels).with("assignees",assignees).to(repo.getApiTailUrl("issues"),GHIssue.class).wrap(repo); } } // ... rest of the code ...
55ef7aa61e7c0980c53f9ae25dbf8ee368b6db1c
setup.py
setup.py
from setuptools import setup setup( name='whichpkg', version='0.3.0', author='Matt Robenolt', author_email='[email protected]', url='https://github.com/mattrobenolt/whichpkg', description='Locate the path of a specific python module', long_description=__doc__, install_requires=[], scripts=['bin/whichpkg'], zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Topic :: Software Development' ], )
import re from setuptools import setup version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0] setup( name='whichpkg', version=version, author='Matt Robenolt', author_email='[email protected]', url='https://github.com/mattrobenolt/whichpkg', description='Locate the path of a specific python module', long_description=__doc__, install_requires=[], scripts=['bin/whichpkg'], zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Topic :: Software Development' ], )
Read the __version__ from whichpkg directly
Read the __version__ from whichpkg directly
Python
bsd-2-clause
mattrobenolt/whichpkg,pombredanne/whichpkg
python
## Code Before: from setuptools import setup setup( name='whichpkg', version='0.3.0', author='Matt Robenolt', author_email='[email protected]', url='https://github.com/mattrobenolt/whichpkg', description='Locate the path of a specific python module', long_description=__doc__, install_requires=[], scripts=['bin/whichpkg'], zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Topic :: Software Development' ], ) ## Instruction: Read the __version__ from whichpkg directly ## Code After: import re from setuptools import setup version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0] setup( name='whichpkg', version=version, author='Matt Robenolt', author_email='[email protected]', url='https://github.com/mattrobenolt/whichpkg', description='Locate the path of a specific python module', long_description=__doc__, install_requires=[], scripts=['bin/whichpkg'], zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Topic :: Software Development' ], )
// ... existing code ... import re from setuptools import setup version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0] setup( name='whichpkg', version=version, author='Matt Robenolt', author_email='[email protected]', url='https://github.com/mattrobenolt/whichpkg', // ... rest of the code ...
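The setup.py change in the record above relies on bin/whichpkg defining a module-level __version__ string; a minimal sketch (not part of the record, version value made up) of what the regular expression extracts:

```python
import re

# Hypothetical line from bin/whichpkg; the actual value is not shown in the record.
script_text = "__version__ = '0.3.1'"

# Same pattern as in the record's setup.py: capture the quoted version string.
match = re.search(r"__version__\s*=\s*'(.+)?'", script_text)
print(match.group(1))  # -> 0.3.1
```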
ee275298e4b0e86da9b1b4b111ac127395d22c31
java/squeek/quakemovement/QuakeServerPlayer.java
java/squeek/quakemovement/QuakeServerPlayer.java
package squeek.quakemovement; import api.player.server.ServerPlayerAPI; import api.player.server.ServerPlayerBase; public class QuakeServerPlayer extends ServerPlayerBase { private boolean wasVelocityChangedBeforeFall = false; public QuakeServerPlayer(ServerPlayerAPI playerapi) { super(playerapi); } @Override public void fall(float fallDistance, float damageMultiplier) { wasVelocityChangedBeforeFall = this.playerAPI.getVelocityChangedField() || this.player.velocityChanged; if (ModConfig.INCREASED_FALL_DISTANCE != 0.0D) { fallDistance -= ModConfig.INCREASED_FALL_DISTANCE; } super.fall(fallDistance, fallDistance); this.playerAPI.setVelocityChangedField(wasVelocityChangedBeforeFall); this.player.velocityChanged = wasVelocityChangedBeforeFall; } }
package squeek.quakemovement; import api.player.server.ServerPlayerAPI; import api.player.server.ServerPlayerBase; import net.minecraft.entity.player.EntityPlayer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingFallEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public class QuakeServerPlayer extends ServerPlayerBase { private boolean wasVelocityChangedBeforeFall = false; public QuakeServerPlayer(ServerPlayerAPI playerapi) { super(playerapi); MinecraftForge.EVENT_BUS.register(this); } @SubscribeEvent public void onLivingFall(LivingFallEvent event) { if (!(event.entity instanceof EntityPlayer)) return; if (ModConfig.INCREASED_FALL_DISTANCE != 0.0D) { event.distance = (float) (event.distance - ModConfig.INCREASED_FALL_DISTANCE); } } @Override public void beforeFall(float fallDistance, float damageMultiplier) { wasVelocityChangedBeforeFall = this.playerAPI.getVelocityChangedField() || this.player.velocityChanged; } @Override public void afterFall(float fallDistance, float damageMultiplier) { this.playerAPI.setVelocityChangedField(wasVelocityChangedBeforeFall); this.player.velocityChanged = wasVelocityChangedBeforeFall; } }
Fix fall damage being applied wrong
Fix fall damage being applied wrong
Java
unlicense
squeek502/Squake
java
## Code Before: package squeek.quakemovement; import api.player.server.ServerPlayerAPI; import api.player.server.ServerPlayerBase; public class QuakeServerPlayer extends ServerPlayerBase { private boolean wasVelocityChangedBeforeFall = false; public QuakeServerPlayer(ServerPlayerAPI playerapi) { super(playerapi); } @Override public void fall(float fallDistance, float damageMultiplier) { wasVelocityChangedBeforeFall = this.playerAPI.getVelocityChangedField() || this.player.velocityChanged; if (ModConfig.INCREASED_FALL_DISTANCE != 0.0D) { fallDistance -= ModConfig.INCREASED_FALL_DISTANCE; } super.fall(fallDistance, fallDistance); this.playerAPI.setVelocityChangedField(wasVelocityChangedBeforeFall); this.player.velocityChanged = wasVelocityChangedBeforeFall; } } ## Instruction: Fix fall damage being applied wrong ## Code After: package squeek.quakemovement; import api.player.server.ServerPlayerAPI; import api.player.server.ServerPlayerBase; import net.minecraft.entity.player.EntityPlayer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingFallEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public class QuakeServerPlayer extends ServerPlayerBase { private boolean wasVelocityChangedBeforeFall = false; public QuakeServerPlayer(ServerPlayerAPI playerapi) { super(playerapi); MinecraftForge.EVENT_BUS.register(this); } @SubscribeEvent public void onLivingFall(LivingFallEvent event) { if (!(event.entity instanceof EntityPlayer)) return; if (ModConfig.INCREASED_FALL_DISTANCE != 0.0D) { event.distance = (float) (event.distance - ModConfig.INCREASED_FALL_DISTANCE); } } @Override public void beforeFall(float fallDistance, float damageMultiplier) { wasVelocityChangedBeforeFall = this.playerAPI.getVelocityChangedField() || this.player.velocityChanged; } @Override public void afterFall(float fallDistance, float damageMultiplier) { this.playerAPI.setVelocityChangedField(wasVelocityChangedBeforeFall); this.player.velocityChanged = wasVelocityChangedBeforeFall; } }
# ... existing code ... import api.player.server.ServerPlayerAPI; import api.player.server.ServerPlayerBase; import net.minecraft.entity.player.EntityPlayer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.living.LivingFallEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public class QuakeServerPlayer extends ServerPlayerBase { # ... modified code ... public QuakeServerPlayer(ServerPlayerAPI playerapi) { super(playerapi); MinecraftForge.EVENT_BUS.register(this); } @SubscribeEvent public void onLivingFall(LivingFallEvent event) { if (!(event.entity instanceof EntityPlayer)) return; if (ModConfig.INCREASED_FALL_DISTANCE != 0.0D) { event.distance = (float) (event.distance - ModConfig.INCREASED_FALL_DISTANCE); } } @Override public void beforeFall(float fallDistance, float damageMultiplier) { wasVelocityChangedBeforeFall = this.playerAPI.getVelocityChangedField() || this.player.velocityChanged; } @Override public void afterFall(float fallDistance, float damageMultiplier) { this.playerAPI.setVelocityChangedField(wasVelocityChangedBeforeFall); this.player.velocityChanged = wasVelocityChangedBeforeFall; } # ... rest of the code ...
0115386db31faf1876915510a48ca5c0fe05f113
src/main/java/com/github/timtebeek/graphql/jpa/GraphqlJpaAutoConfiguration.java
src/main/java/com/github/timtebeek/graphql/jpa/GraphqlJpaAutoConfiguration.java
package com.github.timtebeek.graphql.jpa; import javax.persistence.EntityManager; import org.crygier.graphql.GraphQLExecutor; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration @SuppressWarnings("static-method") public class GraphqlJpaAutoConfiguration { @Bean @ConditionalOnMissingBean(GraphQLExecutor.class) public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) { return new GraphQLExecutor(entityManager); } }
package com.github.timtebeek.graphql.jpa; import javax.persistence.EntityManager; import org.crygier.graphql.GraphQLExecutor; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @Configuration @ComponentScan @SuppressWarnings("static-method") public class GraphqlJpaAutoConfiguration { @Bean @ConditionalOnMissingBean(GraphQLExecutor.class) public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) { return new GraphQLExecutor(entityManager); } }
Add @ComponentScan to discover GraphQLController
Add @ComponentScan to discover GraphQLController
Java
mit
timtebeek/graphql-jpa-spring-boot-starter,timtebeek/graphql-jpa-spring-boot-starter
java
## Code Before: package com.github.timtebeek.graphql.jpa; import javax.persistence.EntityManager; import org.crygier.graphql.GraphQLExecutor; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration @SuppressWarnings("static-method") public class GraphqlJpaAutoConfiguration { @Bean @ConditionalOnMissingBean(GraphQLExecutor.class) public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) { return new GraphQLExecutor(entityManager); } } ## Instruction: Add @ComponentScan to discover GraphQLController ## Code After: package com.github.timtebeek.graphql.jpa; import javax.persistence.EntityManager; import org.crygier.graphql.GraphQLExecutor; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @Configuration @ComponentScan @SuppressWarnings("static-method") public class GraphqlJpaAutoConfiguration { @Bean @ConditionalOnMissingBean(GraphQLExecutor.class) public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) { return new GraphQLExecutor(entityManager); } }
... import org.crygier.graphql.GraphQLExecutor; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @Configuration @ComponentScan @SuppressWarnings("static-method") public class GraphqlJpaAutoConfiguration { @Bean ...
6e9059f24e75b37333af017f8facdb3426144ecf
conf/jupyter_notebook_config.py
conf/jupyter_notebook_config.py
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading
Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading
Python
bsd-3-clause
NII-cloud-operation/Jupyter-LC_docker,NII-cloud-operation/Jupyter-LC_docker
python
## Code Before: import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] ## Instruction: Remove SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID from os.environ after reading ## Code After: import os c.NotebookApp.ip = '*' c.NotebookApp.allow_remote_access = True c.MultiKernelManager.kernel_manager_class = 'lc_wrapper.LCWrapperKernelManager' c.KernelManager.shutdown_wait_time = 10.0 c.FileContentsManager.delete_to_trash = False c.NotebookApp.quit_button = False c.NotebookApp.kernel_spec_manager_class = 'lc_wrapper.LCWrapperKernelSpecManager' if 'PASSWORD' in os.environ: from notebook.auth import passwd c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID']
# ... existing code ... if 'SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID' in os.environ: c.ScrapboxAPI.cookie_connect_sid = os.environ['SIDESTICKIES_SCRAPBOX_' 'COOKIE_CONNECT_SID'] del os.environ['SIDESTICKIES_SCRAPBOX_COOKIE_CONNECT_SID'] if 'SIDESTICKIES_SCRAPBOX_PROJECT_ID' in os.environ: c.ScrapboxAPI.project_id = os.environ['SIDESTICKIES_SCRAPBOX_PROJECT_ID'] # ... rest of the code ...
3f6a8afc49aa83d74a1541e88daf65cca593473d
ode/app/managers/nodes/UserManager.java
ode/app/managers/nodes/UserManager.java
package managers.nodes; import com.fasterxml.jackson.databind.JsonNode; import play.libs.F.Promise; public class UserManager extends LabeledNodeWithPropertiesManager { public UserManager() { this.label = "User"; } @Override protected Promise<Boolean> create(JsonNode properties, String location) { return super.create(properties, location, "username"); } }
package managers.nodes; import com.fasterxml.jackson.databind.JsonNode; import play.libs.F.Promise; public class UserManager extends LabeledNodeWithPropertiesManager { public UserManager() { this.label = "User"; } // CREATE @Override protected Promise<Boolean> create(JsonNode properties, String location) { return super.create(properties, location, "username"); } }
Enable operations that write to the DB to be rolled back on error by wrapping them in transactions.
WIP: Enable operations that write to the DB to be rolled back on error by wrapping them in transactions. Add comments to indicate purpose of groups of related `UserManager` methods.
Java
agpl-3.0
itsjeyd/ODE,itsjeyd/ODE,itsjeyd/ODE,itsjeyd/ODE
java
## Code Before: package managers.nodes; import com.fasterxml.jackson.databind.JsonNode; import play.libs.F.Promise; public class UserManager extends LabeledNodeWithPropertiesManager { public UserManager() { this.label = "User"; } @Override protected Promise<Boolean> create(JsonNode properties, String location) { return super.create(properties, location, "username"); } } ## Instruction: WIP: Enable operations that write to the DB to be rolled back on error by wrapping them in transactions. Add comments to indicate purpose of groups of related `UserManager` methods. ## Code After: package managers.nodes; import com.fasterxml.jackson.databind.JsonNode; import play.libs.F.Promise; public class UserManager extends LabeledNodeWithPropertiesManager { public UserManager() { this.label = "User"; } // CREATE @Override protected Promise<Boolean> create(JsonNode properties, String location) { return super.create(properties, location, "username"); } }
# ... existing code ... this.label = "User"; } // CREATE @Override protected Promise<Boolean> create(JsonNode properties, String location) { return super.create(properties, location, "username"); # ... rest of the code ...
4ba46ac6674f7972d70d2e4f819303e38a934462
setup.py
setup.py
from ez_setup import use_setuptools use_setuptools() from setuptools import setup import sys, os sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'loadimpact')) from version import __version__ setup( name='loadimpact', version=__version__, author='Load Impact', author_email='[email protected]', packages=['loadimpact'], url='http://developers.loadimpact.com/', license='LICENSE.txt', description="The Load Impact API SDK provides Python APIs to create and manage load tests", install_requires=['requests'], classifiers=[ 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules' ], keywords="loadimpact api rest sdk", test_suite='test' )
# Make sure setup tools is installed, if not install it. from ez_setup import use_setuptools use_setuptools() from setuptools import setup import sys, os sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'loadimpact')) from version import __version__ setup( name='loadimpact', version=__version__, author='Load Impact', author_email='[email protected]', packages=['loadimpact'], url='http://developers.loadimpact.com/', license='LICENSE.txt', description="The Load Impact SDK provides access to Load Impact's cloud-based load testing platform", install_requires=['requests'], classifiers=[ 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules' ], keywords="loadimpact api rest sdk", test_suite='test' )
Add license notice and update description.
Add license notice and update description.
Python
apache-2.0
loadimpact/loadimpact-sdk-python
python
## Code Before: from ez_setup import use_setuptools use_setuptools() from setuptools import setup import sys, os sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'loadimpact')) from version import __version__ setup( name='loadimpact', version=__version__, author='Load Impact', author_email='[email protected]', packages=['loadimpact'], url='http://developers.loadimpact.com/', license='LICENSE.txt', description="The Load Impact API SDK provides Python APIs to create and manage load tests", install_requires=['requests'], classifiers=[ 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules' ], keywords="loadimpact api rest sdk", test_suite='test' ) ## Instruction: Add license notice and update description. ## Code After: # Make sure setup tools is installed, if not install it. from ez_setup import use_setuptools use_setuptools() from setuptools import setup import sys, os sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'loadimpact')) from version import __version__ setup( name='loadimpact', version=__version__, author='Load Impact', author_email='[email protected]', packages=['loadimpact'], url='http://developers.loadimpact.com/', license='LICENSE.txt', description="The Load Impact SDK provides access to Load Impact's cloud-based load testing platform", install_requires=['requests'], classifiers=[ 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules' ], keywords="loadimpact api rest sdk", test_suite='test' )
// ... existing code ... # Make sure setup tools is installed, if not install it. from ez_setup import use_setuptools use_setuptools() // ... modified code ... packages=['loadimpact'], url='http://developers.loadimpact.com/', license='LICENSE.txt', description="The Load Impact SDK provides access to Load Impact's cloud-based load testing platform", install_requires=['requests'], classifiers=[ 'Intended Audience :: Developers', // ... rest of the code ...
b4932c9e95b34a875c8d5234a1aa025aa5d5dad0
migrations/versions/07ebe99161d5_add_banner_image_url_to_sessio.py
migrations/versions/07ebe99161d5_add_banner_image_url_to_sessio.py
# revision identifiers, used by Alembic. revision = '07ebe99161d5' down_revision = 'd6b1904bea0e' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('session', sa.Column('banner_image_url', sa.Unicode(length=2000), nullable=True)) def downgrade(): op.drop_column('session', 'banner_image_url')
# revision identifiers, used by Alembic. revision = '07ebe99161d5' down_revision = '60a132ae73f1' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('session', sa.Column('banner_image_url', sa.Unicode(length=2000), nullable=True)) def downgrade(): op.drop_column('session', 'banner_image_url')
Update down_revision in migration file.
Update down_revision in migration file.
Python
agpl-3.0
hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel
python
## Code Before: # revision identifiers, used by Alembic. revision = '07ebe99161d5' down_revision = 'd6b1904bea0e' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('session', sa.Column('banner_image_url', sa.Unicode(length=2000), nullable=True)) def downgrade(): op.drop_column('session', 'banner_image_url') ## Instruction: Update down_revision in migration file. ## Code After: # revision identifiers, used by Alembic. revision = '07ebe99161d5' down_revision = '60a132ae73f1' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('session', sa.Column('banner_image_url', sa.Unicode(length=2000), nullable=True)) def downgrade(): op.drop_column('session', 'banner_image_url')
... # revision identifiers, used by Alembic. revision = '07ebe99161d5' down_revision = '60a132ae73f1' from alembic import op import sqlalchemy as sa ...
854b0968afc41894d8cf79d712175b497df9828e
bolt/spark/utils.py
bolt/spark/utils.py
def get_kv_shape(shape, key_axes): func = lambda axis: shape[axis] return _get_kv_func(func, shape, key_axes) def get_kv_axes(shape, key_axes): func = lambda axis: axis return _get_kv_func(func, shape, key_axes) def _get_kv_func(func, shape, key_axes): key_res = [func(axis) for axis in key_axes] value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes] return key_res, value_res def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] count = None if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) def func(k, it): for i, v in enumerate(it, starts[k]): yield v, i return count, rdd.mapPartitionsWithIndex(func)
def get_kv_shape(shape, key_axes): func = lambda axis: shape[axis] return _get_kv_func(func, shape, key_axes) def get_kv_axes(shape, key_axes): func = lambda axis: axis return _get_kv_func(func, shape, key_axes) def _get_kv_func(func, shape, key_axes): key_res = [func(axis) for axis in key_axes] value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes] return key_res, value_res def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) else: count = rdd.count() def func(k, it): for i, v in enumerate(it, starts[k]): yield v, i return count, rdd.mapPartitionsWithIndex(func)
Fix for count with one partition
Fix for count with one partition
Python
apache-2.0
bolt-project/bolt,andrewosh/bolt,jwittenbach/bolt
python
## Code Before: def get_kv_shape(shape, key_axes): func = lambda axis: shape[axis] return _get_kv_func(func, shape, key_axes) def get_kv_axes(shape, key_axes): func = lambda axis: axis return _get_kv_func(func, shape, key_axes) def _get_kv_func(func, shape, key_axes): key_res = [func(axis) for axis in key_axes] value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes] return key_res, value_res def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] count = None if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) def func(k, it): for i, v in enumerate(it, starts[k]): yield v, i return count, rdd.mapPartitionsWithIndex(func) ## Instruction: Fix for count with one partition ## Code After: def get_kv_shape(shape, key_axes): func = lambda axis: shape[axis] return _get_kv_func(func, shape, key_axes) def get_kv_axes(shape, key_axes): func = lambda axis: axis return _get_kv_func(func, shape, key_axes) def _get_kv_func(func, shape, key_axes): key_res = [func(axis) for axis in key_axes] value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes] return key_res, value_res def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) else: count = rdd.count() def func(k, it): for i, v in enumerate(it, starts[k]): yield v, i return count, rdd.mapPartitionsWithIndex(func)
// ... existing code ... Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) else: count = rdd.count() def func(k, it): for i, v in enumerate(it, starts[k]): // ... rest of the code ...
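A minimal usage sketch of the zip_with_index helper changed in the record above, assuming a running SparkContext named sc and made-up sample data (neither is part of the record):

```python
# Hypothetical usage of zip_with_index from bolt/spark/utils.py.
rdd = sc.parallelize(['a', 'b', 'c'], 2)

count, indexed = zip_with_index(rdd)
# count == 3, computed eagerly; the fix above also covers the single-partition case.
# indexed.collect() -> [('a', 0), ('b', 1), ('c', 2)]
```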
d7219365197ff22aec44836e37af19f62420f996
paystackapi/tests/test_tcontrol.py
paystackapi/tests/test_tcontrol.py
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.tcontrol import TransferControl class TestTransfer(BaseTestCase): @httpretty.activate def test_check_balance(self): """Method defined to test check_balance.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/balance"), content_type='text/json', body='{"status": true, "message": "Balances retrieved"}', status=201, ) response = TransferControl.check_balance() self.assertTrue(response['status'])
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.tcontrol import TransferControl class TestTransfer(BaseTestCase): @httpretty.activate def test_check_balance(self): """Method defined to test check_balance.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/balance"), content_type='text/json', body='{"status": true, "message": "Balances retrieved"}', status=201, ) response = TransferControl.check_balance() self.assertTrue(response['status']) @httpretty.activate def test_resend_otp(self): """Method defined to test resend_otp.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/transfer/resend_otp"), content_type='text/json', body='{"status": true, "message": "OTP has been resent"}', status=201, ) response = TransferControl.resend_otp( transfer_code="TRF_vsyqdmlzble3uii", reason="Just do it." ) self.assertTrue(response['status'])
Add test for transfer control resend otp
Add test for transfer control resend otp
Python
mit
andela-sjames/paystack-python
python
## Code Before: import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.tcontrol import TransferControl class TestTransfer(BaseTestCase): @httpretty.activate def test_check_balance(self): """Method defined to test check_balance.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/balance"), content_type='text/json', body='{"status": true, "message": "Balances retrieved"}', status=201, ) response = TransferControl.check_balance() self.assertTrue(response['status']) ## Instruction: Add test for transfer control resend otp ## Code After: import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.tcontrol import TransferControl class TestTransfer(BaseTestCase): @httpretty.activate def test_check_balance(self): """Method defined to test check_balance.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/balance"), content_type='text/json', body='{"status": true, "message": "Balances retrieved"}', status=201, ) response = TransferControl.check_balance() self.assertTrue(response['status']) @httpretty.activate def test_resend_otp(self): """Method defined to test resend_otp.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/transfer/resend_otp"), content_type='text/json', body='{"status": true, "message": "OTP has been resent"}', status=201, ) response = TransferControl.resend_otp( transfer_code="TRF_vsyqdmlzble3uii", reason="Just do it." ) self.assertTrue(response['status'])
// ... existing code ... response = TransferControl.check_balance() self.assertTrue(response['status']) @httpretty.activate def test_resend_otp(self): """Method defined to test resend_otp.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/transfer/resend_otp"), content_type='text/json', body='{"status": true, "message": "OTP has been resent"}', status=201, ) response = TransferControl.resend_otp( transfer_code="TRF_vsyqdmlzble3uii", reason="Just do it." ) self.assertTrue(response['status']) // ... rest of the code ...
fff0087f82c3f79d5e60e32071a4e89478d8b85e
tests/test_element.py
tests/test_element.py
import pkg_resources pkg_resources.require('cothread') import cothread import rml.element def test_create_element(): e = rml.element.Element('BPM', 6.0) assert e.get_type() == 'BPM' assert e.get_length() == 6.0 def test_add_element_to_family(): e = rml.element.Element('dummy', 0.0) e.add_to_family('fam') assert 'fam' in e.get_families() def test_get_pv_value(): PV = 'SR22C-DI-EBPM-04:SA:X' e = rml.element.Element('dummy', 0.0, pv=PV) result = e.get_pv('x') assert isinstance(result, float)
import pkg_resources pkg_resources.require('cothread') import cothread import rml import rml.element def test_create_element(): e = rml.element.Element('BPM', 6.0) assert e.get_type() == 'BPM' assert e.get_length() == 6.0 def test_add_element_to_family(): e = rml.element.Element('dummy', 0.0) e.add_to_family('fam') assert 'fam' in e.get_families() def test_get_pv_value(): PV = 'SR22C-DI-EBPM-04:SA:X' e = rml.element.Element('dummy', 0.0) e.set_pv('x', PV) result = e.get_pv('x') assert isinstance(result, float) with pytest.raises(rml.ConfigException): e.get_pv('y')
Make pvs in Element behave more realistically.
Make pvs in Element behave more realistically.
Python
apache-2.0
razvanvasile/RML,willrogers/pml,willrogers/pml
python
## Code Before: import pkg_resources pkg_resources.require('cothread') import cothread import rml.element def test_create_element(): e = rml.element.Element('BPM', 6.0) assert e.get_type() == 'BPM' assert e.get_length() == 6.0 def test_add_element_to_family(): e = rml.element.Element('dummy', 0.0) e.add_to_family('fam') assert 'fam' in e.get_families() def test_get_pv_value(): PV = 'SR22C-DI-EBPM-04:SA:X' e = rml.element.Element('dummy', 0.0, pv=PV) result = e.get_pv('x') assert isinstance(result, float) ## Instruction: Make pvs in Element behave more realistically. ## Code After: import pkg_resources pkg_resources.require('cothread') import cothread import rml import rml.element def test_create_element(): e = rml.element.Element('BPM', 6.0) assert e.get_type() == 'BPM' assert e.get_length() == 6.0 def test_add_element_to_family(): e = rml.element.Element('dummy', 0.0) e.add_to_family('fam') assert 'fam' in e.get_families() def test_get_pv_value(): PV = 'SR22C-DI-EBPM-04:SA:X' e = rml.element.Element('dummy', 0.0) e.set_pv('x', PV) result = e.get_pv('x') assert isinstance(result, float) with pytest.raises(rml.ConfigException): e.get_pv('y')
... import pkg_resources pkg_resources.require('cothread') import cothread import rml import rml.element ... def test_get_pv_value(): PV = 'SR22C-DI-EBPM-04:SA:X' e = rml.element.Element('dummy', 0.0) e.set_pv('x', PV) result = e.get_pv('x') assert isinstance(result, float) with pytest.raises(rml.ConfigException): e.get_pv('y') ...
6c2dae9bad86bf3f40d892eba50853d704f696b7
pombola/settings/tests.py
pombola/settings/tests.py
from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner' ) + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), }
from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner', 'pombola.interests_register') + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), }
Make sure that the interests_register tables are created
Make sure that the interests_register tables are created Nose tries to run the interests_register tests, but they will fail unless the interest_register app is added to INSTALLED_APPS, because its tables won't be created in the test database.
Python
agpl-3.0
patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,mysociety/pombola,hzj123/56th
python
## Code Before: from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner' ) + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), } ## Instruction: Make sure that the interests_register tables are created Nose tries to run the interests_register tests, but they will fail unless the interest_register app is added to INSTALLED_APPS, because its tables won't be created in the test database. ## Code After: from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner', 'pombola.interests_register') + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), }
# ... existing code ... 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner', 'pombola.interests_register') + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. # ... rest of the code ...
f2fbda74f4036b7ddf1cb31712d48f55f2652675
java/first_test/FstGenerator.java
java/first_test/FstGenerator.java
import java.util.List; class FstGenerator { private StringBuffer strBuff; private List<State> fstStates; public FstGenerator(List<State> fstStates) { this.fstStates = fstStates; this.strBuff = new StringBuffer(); } public StringBuffer compute() { append("class HelloWorld {"); generateMain(); append("}"); return strBuff; } private void append(String strToAppend) { strBuff.append(strToAppend); strBuff.append("\n"); } private void generateMain() { appendWithTab("public void main(String... args) {", 1); appendWithTab("System.out.println(\"Hello World !\");", 2); appendWithTab("}", 1); } private void appendWithTab(String strToAppend, int numberOfTab) { for (int i = 0; i < numberOfTab; i++) { strBuff.append("\t"); } strBuff.append(strToAppend); strBuff.append("\n"); } }
import java.util.List; class FstGenerator { private StringBuffer strBuff; private List<State> fstStates; public FstGenerator(List<State> fstStates) { this.fstStates = fstStates; this.strBuff = new StringBuffer(); } public StringBuffer compute() { append("class FstCompute {"); emptyLine(); generateMain(); emptyLine(); for ( State state : fstStates ) { generateStateCase(state); } append("}"); return strBuff; } private void generateStateCase(State state) { } private void append(String strToAppend) { strBuff.append(strToAppend); strBuff.append("\n"); } private void generateMain() { appendWithTab("public static int compute(int[] token) {", 1); appendWithTab("return node_0(token, 0, 0);", 2); appendWithTab("}", 1); } private void appendWithTab(String strToAppend, int numberOfTab) { for (int i = 0; i < numberOfTab; i++) { strBuff.append("\t"); } strBuff.append(strToAppend); strBuff.append("\n"); } private void emptyLine() { strBuff.append("\n"); } }
Rename generated class and filled main method
MINOR: Rename generated class and filled main method
Java
apache-2.0
devictr/fst-jit,devictr/fst-jit,devictr/fst-jit
java
## Code Before: import java.util.List; class FstGenerator { private StringBuffer strBuff; private List<State> fstStates; public FstGenerator(List<State> fstStates) { this.fstStates = fstStates; this.strBuff = new StringBuffer(); } public StringBuffer compute() { append("class HelloWorld {"); generateMain(); append("}"); return strBuff; } private void append(String strToAppend) { strBuff.append(strToAppend); strBuff.append("\n"); } private void generateMain() { appendWithTab("public void main(String... args) {", 1); appendWithTab("System.out.println(\"Hello World !\");", 2); appendWithTab("}", 1); } private void appendWithTab(String strToAppend, int numberOfTab) { for (int i = 0; i < numberOfTab; i++) { strBuff.append("\t"); } strBuff.append(strToAppend); strBuff.append("\n"); } } ## Instruction: MINOR: Rename generated class and filled main method ## Code After: import java.util.List; class FstGenerator { private StringBuffer strBuff; private List<State> fstStates; public FstGenerator(List<State> fstStates) { this.fstStates = fstStates; this.strBuff = new StringBuffer(); } public StringBuffer compute() { append("class FstCompute {"); emptyLine(); generateMain(); emptyLine(); for ( State state : fstStates ) { generateStateCase(state); } append("}"); return strBuff; } private void generateStateCase(State state) { } private void append(String strToAppend) { strBuff.append(strToAppend); strBuff.append("\n"); } private void generateMain() { appendWithTab("public static int compute(int[] token) {", 1); appendWithTab("return node_0(token, 0, 0);", 2); appendWithTab("}", 1); } private void appendWithTab(String strToAppend, int numberOfTab) { for (int i = 0; i < numberOfTab; i++) { strBuff.append("\t"); } strBuff.append(strToAppend); strBuff.append("\n"); } private void emptyLine() { strBuff.append("\n"); } }
... } public StringBuffer compute() { append("class FstCompute {"); emptyLine(); generateMain(); emptyLine(); for ( State state : fstStates ) { generateStateCase(state); } append("}"); return strBuff; } private void generateStateCase(State state) { } private void append(String strToAppend) { ... } private void generateMain() { appendWithTab("public static int compute(int[] token) {", 1); appendWithTab("return node_0(token, 0, 0);", 2); appendWithTab("}", 1); } ... strBuff.append("\n"); } private void emptyLine() { strBuff.append("\n"); } } ...
3a586e2d584de1a70dd62ca0c9548fbc7a092164
calvin/runtime/south/calvinlib/textformatlib/Pystache.py
calvin/runtime/south/calvinlib/textformatlib/Pystache.py
from calvin.runtime.south.calvinlib import base_calvinlib_object
import pystache


class Pystache(base_calvinlib_object.BaseCalvinlibObject):
    """
    Functions for manipulating strings.
    """

    init_schema = {
        "description": "setup mustache formated strings",
        "type": "object",
        "properties": {}
    }

    render = {
        "description": "convert dict structure into string",
        "type": "object",
        "properties": {
            "template": {"type": "string"},
            "dictionary": {"type": "dict"}
        }
    }

    def init(self):
        pass

    def render(self, template, *context, **kwargs):
        return pystache.render(template, *context, **kwargs)
from calvin.runtime.south.calvinlib import base_calvinlib_object
import pystache


class Pystache(base_calvinlib_object.BaseCalvinlibObject):
    """
    Module for formatting strings using Mustache-style templates.
    """

    render_schema = {
        "description": "Return template string rendered using given dictionary",
        "type": "object",
        "properties": {
            "template": {"type": "string"},
            "dictionary": {"type": "dict"}
        }
    }

    def init(self):
        pass

    def render(self, template, dictionary):
        return pystache.render(template, **dictionary)
Fix erroneous schema naming & others
calvinlib: Fix erroneous schema naming & others
Python
apache-2.0
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
python
## Code Before: from calvin.runtime.south.calvinlib import base_calvinlib_object import pystache class Pystache(base_calvinlib_object.BaseCalvinlibObject): """ Functions for manipulating strings. """ init_schema = { "description": "setup mustache formated strings", "type": "object", "properties": {} } render = { "description": "convert dict structure into string", "type": "object", "properties": { "template": {"type": "string"}, "dictionary": {"type": "dict"} } } def init(self): pass def render(self, template, *context, **kwargs): return pystache.render(template, *context, **kwargs) ## Instruction: calvinlib: Fix erroneous schema naming & others ## Code After: from calvin.runtime.south.calvinlib import base_calvinlib_object import pystache class Pystache(base_calvinlib_object.BaseCalvinlibObject): """ Module for formatting strings using Mustache-style templates. """ render_schema = { "description": "Return template string rendered using given dictionary", "type": "object", "properties": { "template": {"type": "string"}, "dictionary": {"type": "dict"} } } def init(self): pass def render(self, template, dictionary): return pystache.render(template, **dictionary)
# ... existing code ... class Pystache(base_calvinlib_object.BaseCalvinlibObject): """ Module for formatting strings using Mustache-style templates. """ render_schema = { "description": "Return template string rendered using given dictionary", "type": "object", "properties": { "template": {"type": "string"}, # ... modified code ... def init(self): pass def render(self, template, dictionary): return pystache.render(template, **dictionary) # ... rest of the code ...
fe0867e5499b627e776d132d300d17b40858dcab
line_profiler.py
line_profiler.py
from cProfile import label
import marshal

from _line_profiler import LineProfiler as CLineProfiler


class LineProfiler(CLineProfiler):
    """ A subclass of the C version solely to provide a decorator since Cython
    does not have closures.
    """

    def __call__(self, func):
        """ Decorate a function to start the profiler on function entry and stop
        it on function exit.
        """
        def f(*args, **kwds):
            self.add_function(func)
            self.enable_by_count()
            try:
                result = func(*args, **kwds)
            finally:
                self.disable_by_count()
            return result
        f.__name__ = func.__name__
        f.__doc__ = func.__doc__
        f.__dict__.update(func.__dict__)
        return f

    def dump_stats(self, filename):
        """ Dump a representation of the data to a file as a marshalled
        dictionary from `get_stats()`.
        """
        stats = self.get_stats()
        f = open(filename, 'wb')
        try:
            marshal.dump(stats, f)
        finally:
            f.close()
from cProfile import label
import marshal

from _line_profiler import LineProfiler as CLineProfiler


class LineProfiler(CLineProfiler):
    """ A subclass of the C version solely to provide a decorator since Cython
    does not have closures.
    """

    def __call__(self, func):
        """ Decorate a function to start the profiler on function entry and stop
        it on function exit.
        """
        def f(*args, **kwds):
            self.add_function(func)
            self.enable_by_count()
            try:
                result = func(*args, **kwds)
            finally:
                self.disable_by_count()
            return result
        f.__name__ = func.__name__
        f.__doc__ = func.__doc__
        f.__dict__.update(func.__dict__)
        return f

    def dump_stats(self, filename):
        """ Dump a representation of the data to a file as a marshalled
        dictionary from `get_stats()`.
        """
        stats = self.get_stats()
        f = open(filename, 'wb')
        try:
            marshal.dump(stats, f)
        finally:
            f.close()

    def run(self, cmd):
        """ Profile a single executable statment in the main namespace.
        """
        import __main__
        dict = __main__.__dict__
        return self.runctx(cmd, dict, dict)

    def runctx(self, cmd, globals, locals):
        """ Profile a single executable statement in the given namespaces.
        """
        self.enable_by_count()
        try:
            exec cmd in globals, locals
        finally:
            self.disable_by_count()
        return self

    def runcall(self, func, *args, **kw):
        """ Profile a single function call.
        """
        self.enable_by_count()
        try:
            return func(*args, **kw)
        finally:
            self.disable_by_count()
Add the typical run/runctx/runcall methods.
ENH: Add the typical run/runctx/runcall methods.
Python
bsd-3-clause
amegianeg/line_profiler,jstasiak/line_profiler,dreampuf/lprofiler,dreampuf/lprofiler,eblur/line_profiler,jstasiak/line_profiler,ymero/line_profiler,eblur/line_profiler,certik/line_profiler,certik/line_profiler,amegianeg/line_profiler,Doctorhoenikker/line_profiler,jsalva/line_profiler,Doctorhoenikker/line_profiler,ymero/line_profiler,jsalva/line_profiler,dreampuf/lprofiler
python
## Code Before: from cProfile import label import marshal from _line_profiler import LineProfiler as CLineProfiler class LineProfiler(CLineProfiler): """ A subclass of the C version solely to provide a decorator since Cython does not have closures. """ def __call__(self, func): """ Decorate a function to start the profiler on function entry and stop it on function exit. """ def f(*args, **kwds): self.add_function(func) self.enable_by_count() try: result = func(*args, **kwds) finally: self.disable_by_count() return result f.__name__ = func.__name__ f.__doc__ = func.__doc__ f.__dict__.update(func.__dict__) return f def dump_stats(self, filename): """ Dump a representation of the data to a file as a marshalled dictionary from `get_stats()`. """ stats = self.get_stats() f = open(filename, 'wb') try: marshal.dump(stats, f) finally: f.close() ## Instruction: ENH: Add the typical run/runctx/runcall methods. ## Code After: from cProfile import label import marshal from _line_profiler import LineProfiler as CLineProfiler class LineProfiler(CLineProfiler): """ A subclass of the C version solely to provide a decorator since Cython does not have closures. """ def __call__(self, func): """ Decorate a function to start the profiler on function entry and stop it on function exit. """ def f(*args, **kwds): self.add_function(func) self.enable_by_count() try: result = func(*args, **kwds) finally: self.disable_by_count() return result f.__name__ = func.__name__ f.__doc__ = func.__doc__ f.__dict__.update(func.__dict__) return f def dump_stats(self, filename): """ Dump a representation of the data to a file as a marshalled dictionary from `get_stats()`. """ stats = self.get_stats() f = open(filename, 'wb') try: marshal.dump(stats, f) finally: f.close() def run(self, cmd): """ Profile a single executable statment in the main namespace. """ import __main__ dict = __main__.__dict__ return self.runctx(cmd, dict, dict) def runctx(self, cmd, globals, locals): """ Profile a single executable statement in the given namespaces. """ self.enable_by_count() try: exec cmd in globals, locals finally: self.disable_by_count() return self def runcall(self, func, *args, **kw): """ Profile a single function call. """ self.enable_by_count() try: return func(*args, **kw) finally: self.disable_by_count()
# ... existing code ... finally: f.close() def run(self, cmd): """ Profile a single executable statment in the main namespace. """ import __main__ dict = __main__.__dict__ return self.runctx(cmd, dict, dict) def runctx(self, cmd, globals, locals): """ Profile a single executable statement in the given namespaces. """ self.enable_by_count() try: exec cmd in globals, locals finally: self.disable_by_count() return self def runcall(self, func, *args, **kw): """ Profile a single function call. """ self.enable_by_count() try: return func(*args, **kw) finally: self.disable_by_count() # ... rest of the code ...
88fd32be09bc20ce734f272b7d3a54a71958e6b4
energy/models.py
energy/models.py
from sqlalchemy import create_engine
from sqlalchemy.sql import text
import arrow


def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"):
    """ Return json object for flot chart """
    engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True)
    conn = engine.connect()

    query = """SELECT DATE_M, Ch1
               FROM INTERVAL_READINGS
               WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y)
               ORDER BY DATE_M ASC
            """
    s = text(query)
    data = conn.execute(s, x=start_date, y=end_date).fetchall()

    chartdata = {}
    chartdata['label'] = 'Energy Profile'
    chartdata['consumption'] = []
    for row in data:
        dTime = arrow.get(row[0])
        ts = int(dTime.timestamp * 1000)
        chartdata['consumption'].append([ts, row[1]])

    return chartdata
from sqlalchemy import create_engine
from sqlalchemy import MetaData, Table, Column, DateTime, Float, between
from sqlalchemy.sql import select, text
import arrow

metadata = MetaData()
meter_readings = Table('interval_readings', metadata,
                       Column('reading_date', DateTime, primary_key=True),
                       Column('ch1', Float, nullable=False),
                       )


def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"):
    """ Return json object for flot chart """
    engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True)
    conn = engine.connect()

    s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date))
    data = conn.execute(s).fetchall()

    chartdata = {}
    chartdata['label'] = 'Energy Profile'
    chartdata['consumption'] = []
    for row in data:
        dTime = arrow.get(row[0])
        ts = int(dTime.timestamp * 1000)
        chartdata['consumption'].append([ts, row[1]])

    return chartdata
Use sqlalchemy to generate query
Use sqlalchemy to generate query
Python
agpl-3.0
aguinane/energyusage,aguinane/energyusage,aguinane/energyusage,aguinane/energyusage
python
## Code Before: from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata ## Instruction: Use sqlalchemy to generate query ## Code After: from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
// ... existing code ... from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): // ... modified code ... """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' // ... rest of the code ...
1fb46372db32fdb2606c560a1bbec168628039a7
lucid/modelzoo/other_models/CLIPx4.py
lucid/modelzoo/other_models/CLIPx4.py
from lucid.modelzoo.vision_base import Model


class CLIPImage(Model):
    image_value_range = (0, 255)
    input_name = 'input_image'
    model_name = "RN50_4x"
    image_shape = [288, 288, 3]
    model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
from lucid.modelzoo.vision_base import Model


class CLIPImage(Model):
    image_value_range = (0, 255)
    input_name = 'input_image'
    model_name = "RN50_4x"
    image_shape = [288, 288, 3]
    model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
Update CLIP model URL to modelzoo bucket
Update CLIP model URL to modelzoo bucket
Python
apache-2.0
tensorflow/lucid,tensorflow/lucid,tensorflow/lucid,tensorflow/lucid
python
## Code Before: from lucid.modelzoo.vision_base import Model class CLIPImage(Model): image_value_range = (0, 255) input_name = 'input_image' model_name = "RN50_4x" image_shape = [288, 288, 3] model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb" ## Instruction: Update CLIP model URL to modelzoo bucket ## Code After: from lucid.modelzoo.vision_base import Model class CLIPImage(Model): image_value_range = (0, 255) input_name = 'input_image' model_name = "RN50_4x" image_shape = [288, 288, 3] model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
# ... existing code ... input_name = 'input_image' model_name = "RN50_4x" image_shape = [288, 288, 3] model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb" # ... rest of the code ...
43f66e4e816f5e7a49536af2042487bf3304c414
src/main/resources/archetype-resources/__rootArtifactId__-ejb/src/main/java/ejb/LoggerProducer.java
src/main/resources/archetype-resources/__rootArtifactId__-ejb/src/main/java/ejb/LoggerProducer.java
package ${package}.ejb;

import javax.enterprise.inject.Produces;
import javax.enterprise.inject.spi.InjectionPoint;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerProducer {

    @Produces
    public Logger getLogger(InjectionPoint injectionPoint) {
        return LoggerFactory.getLogger(
                injectionPoint.getBean().getBeanClass());
    }
}
package ${package}.ejb;

import javax.enterprise.inject.Produces;
import javax.enterprise.inject.spi.InjectionPoint;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerProducer {

    @Produces
    public Logger getLogger(InjectionPoint injectionPoint) {
        Class<?> clazz;
        if (injectionPoint.getBean() != null) {
            clazz = injectionPoint.getBean().getBeanClass();
        } else {
            clazz = injectionPoint.getMember().getDeclaringClass();
        }
        return LoggerFactory.getLogger(clazz);
    }
}
Fix Logger injection in GlassFish 4
Fix Logger injection in GlassFish 4 Signed-off-by: Fred Bricon <[email protected]>
Java
apache-2.0
open-archetypes/multi-javaee6-archetype
java
## Code Before: package ${package}.ejb; import javax.enterprise.inject.Produces; import javax.enterprise.inject.spi.InjectionPoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LoggerProducer { @Produces public Logger getLogger(InjectionPoint injectionPoint) { return LoggerFactory.getLogger( injectionPoint.getBean().getBeanClass()); } } ## Instruction: Fix Logger injection in GlassFish 4 Signed-off-by: Fred Bricon <[email protected]> ## Code After: package ${package}.ejb; import javax.enterprise.inject.Produces; import javax.enterprise.inject.spi.InjectionPoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LoggerProducer { @Produces public Logger getLogger(InjectionPoint injectionPoint) { Class<?> clazz; if (injectionPoint.getBean() != null) { clazz = injectionPoint.getBean().getBeanClass(); } else { clazz = injectionPoint.getMember().getDeclaringClass(); } return LoggerFactory.getLogger(clazz); } }
// ... existing code ... import org.slf4j.LoggerFactory; public class LoggerProducer { @Produces public Logger getLogger(InjectionPoint injectionPoint) { Class<?> clazz; if (injectionPoint.getBean() != null) { clazz = injectionPoint.getBean().getBeanClass(); } else { clazz = injectionPoint.getMember().getDeclaringClass(); } return LoggerFactory.getLogger(clazz); } } // ... rest of the code ...
6858e4a2e2047c906a3b8f69b7cd7b04a0cbf666
pivoteer/writer/censys.py
pivoteer/writer/censys.py
from pivoteer.writer.core import CsvWriter


class CensysCsvWriter(CsvWriter):
    """
    A CsvWriter implementation for IndicatorRecords with a record type of "CE"
    (Censys Record)
    """

    def __init__(self, writer):
        """
        Create a new CsvWriter for Censys Records using the given writer.

        :param writer: The writer
        """
        super(CensysCsvWriter, self).__init__(writer)

    def create_title_rows(self, indicator, records):
        yield ["Certificate Search Results"]

    def create_header(self):
        return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]

    def create_rows(self, record):
        info = record["info"]
        records = info["records"]
        for record in records:
            parsed = record["parsed"]
            subject = parsed["subject_dn"]
            issuer = parsed["issuer_dn"]
            sha256 = parsed["fingerprint_sha256"]
            validity = parsed["validity"]
            start = validity["start"]
            end = validity["end"]
            yield [subject, issuer, sha256, start, end]
from pivoteer.writer.core import CsvWriter


class CensysCsvWriter(CsvWriter):
    """
    A CsvWriter implementation for IndicatorRecords with a record type of "CE"
    (Censys Record)
    """

    def __init__(self, writer):
        """
        Create a new CsvWriter for Censys Records using the given writer.

        :param writer: The writer
        """
        super(CensysCsvWriter, self).__init__(writer)

    def create_title_rows(self, indicator, records):
        yield ["Certificate Search Results"]

    def create_header(self):
        return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]

    def create_rows(self, record):
        if (record is not None and len(record) > 0):
            info = record["info"]
            records = info["records"]
            for record in records:
                parsed = record["parsed"]
                subject = parsed["subject_dn"]
                issuer = parsed["issuer_dn"]
                sha256 = parsed["fingerprint_sha256"]
                validity = parsed["validity"]
                start = validity["start"]
                end = validity["end"]
                yield [subject, issuer, sha256, start, end]
Resolve issues with exporting empty dataset for certificate list
Resolve issues with exporting empty dataset for certificate list
Python
mit
gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID
python
## Code Before: from pivoteer.writer.core import CsvWriter class CensysCsvWriter(CsvWriter): """ A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record) """ def __init__(self, writer): """ Create a new CsvWriter for Censys Records using the given writer. :param writer: The writer """ super(CensysCsvWriter, self).__init__(writer) def create_title_rows(self, indicator, records): yield ["Certificate Search Results"] def create_header(self): return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"] def create_rows(self, record): info = record["info"] records = info["records"] for record in records: parsed = record["parsed"] subject = parsed["subject_dn"] issuer = parsed["issuer_dn"] sha256 = parsed["fingerprint_sha256"] validity = parsed["validity"] start = validity["start"] end = validity["end"] yield [subject, issuer, sha256, start, end] ## Instruction: Resolve issues with exporting empty dataset for certificate list ## Code After: from pivoteer.writer.core import CsvWriter class CensysCsvWriter(CsvWriter): """ A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record) """ def __init__(self, writer): """ Create a new CsvWriter for Censys Records using the given writer. :param writer: The writer """ super(CensysCsvWriter, self).__init__(writer) def create_title_rows(self, indicator, records): yield ["Certificate Search Results"] def create_header(self): return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"] def create_rows(self, record): if (record is not None and len(record) > 0): info = record["info"] records = info["records"] for record in records: parsed = record["parsed"] subject = parsed["subject_dn"] issuer = parsed["issuer_dn"] sha256 = parsed["fingerprint_sha256"] validity = parsed["validity"] start = validity["start"] end = validity["end"] yield [subject, issuer, sha256, start, end]
// ... existing code ... """ super(CensysCsvWriter, self).__init__(writer) def create_title_rows(self, indicator, records): yield ["Certificate Search Results"] // ... modified code ... return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"] def create_rows(self, record): if (record is not None and len(record) > 0): info = record["info"] records = info["records"] for record in records: parsed = record["parsed"] subject = parsed["subject_dn"] issuer = parsed["issuer_dn"] sha256 = parsed["fingerprint_sha256"] validity = parsed["validity"] start = validity["start"] end = validity["end"] yield [subject, issuer, sha256, start, end] // ... rest of the code ...
3ff245f667167309ec41941036daa358bc60209d
nrf5/sdk/nrf5_sdk_conf.h
nrf5/sdk/nrf5_sdk_conf.h
// SD specific configurations.
#if (BLUETOOTH_SD == 100)
#define MICROPY_PY_BLE (1)
#define MICROPY_PY_BLE_6LOWPAN (1)
#define MICROPY_PY_USOCKET (1)
#define MICROPY_PY_NETWORK (1)
#elif (BLUETOOTH_SD == 110)
#define MICROPY_PY_BLE (1)
#elif (BLUETOOTH_SD == 132)
#define MICROPY_PY_BLE (1)
#define MICROPY_PY_BLE_NUS (0)
#define MICROPY_PY_UBLUEPY (1)
#define MICROPY_PY_UBLUEPY_PERIPHERAL (1)
#else
#error "SD not supported"
#endif

// Default defines.

#ifndef MICROPY_PY_BLE_6LOWPAN
#define MICROPY_PY_BLE_6LOWPAN (0)
#endif

#ifndef MICROPY_PY_BLE
#define MICROPY_PY_BLE (0)
#endif

#ifndef MICROPY_PY_BLE_NUS
#define MICROPY_PY_BLE_NUS (0)
#endif

#endif
// SD specific configurations.
#if (BLUETOOTH_SD == 100)
#define MICROPY_PY_BLE (1)
#define MICROPY_PY_BLE_6LOWPAN (1)
#define MICROPY_PY_USOCKET (1)
#define MICROPY_PY_NETWORK (1)
#elif (BLUETOOTH_SD == 110)
#define MICROPY_PY_BLE (1)
#define MICROPY_PY_BLE_NUS (0)
#define MICROPY_PY_UBLUEPY (1)
#define MICROPY_PY_UBLUEPY_PERIPHERAL (1)
#elif (BLUETOOTH_SD == 132)
#define MICROPY_PY_BLE (1)
#define MICROPY_PY_BLE_NUS (0)
#define MICROPY_PY_UBLUEPY (1)
#define MICROPY_PY_UBLUEPY_PERIPHERAL (1)
#else
#error "SD not supported"
#endif

// Default defines.

#ifndef MICROPY_PY_BLE_6LOWPAN
#define MICROPY_PY_BLE_6LOWPAN (0)
#endif

#ifndef MICROPY_PY_BLE
#define MICROPY_PY_BLE (0)
#endif

#ifndef MICROPY_PY_BLE_NUS
#define MICROPY_PY_BLE_NUS (0)
#endif

#endif
Enable ubluepy module if s110 bluetooth stack is enabled.
nrf5/sdk: Enable ubluepy module if s110 bluetooth stack is enabled.
C
mit
tralamazza/micropython,tralamazza/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/micropython,tralamazza/micropython,adafruit/micropython,adafruit/circuitpython,tralamazza/micropython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython
c
## Code Before: // SD specific configurations. #if (BLUETOOTH_SD == 100) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_6LOWPAN (1) #define MICROPY_PY_USOCKET (1) #define MICROPY_PY_NETWORK (1) #elif (BLUETOOTH_SD == 110) #define MICROPY_PY_BLE (1) #elif (BLUETOOTH_SD == 132) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_NUS (0) #define MICROPY_PY_UBLUEPY (1) #define MICROPY_PY_UBLUEPY_PERIPHERAL (1) #else #error "SD not supported" #endif // Default defines. #ifndef MICROPY_PY_BLE_6LOWPAN #define MICROPY_PY_BLE_6LOWPAN (0) #endif #ifndef MICROPY_PY_BLE #define MICROPY_PY_BLE (0) #endif #ifndef MICROPY_PY_BLE_NUS #define MICROPY_PY_BLE_NUS (0) #endif #endif ## Instruction: nrf5/sdk: Enable ubluepy module if s110 bluetooth stack is enabled. ## Code After: // SD specific configurations. #if (BLUETOOTH_SD == 100) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_6LOWPAN (1) #define MICROPY_PY_USOCKET (1) #define MICROPY_PY_NETWORK (1) #elif (BLUETOOTH_SD == 110) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_NUS (0) #define MICROPY_PY_UBLUEPY (1) #define MICROPY_PY_UBLUEPY_PERIPHERAL (1) #elif (BLUETOOTH_SD == 132) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_NUS (0) #define MICROPY_PY_UBLUEPY (1) #define MICROPY_PY_UBLUEPY_PERIPHERAL (1) #else #error "SD not supported" #endif // Default defines. #ifndef MICROPY_PY_BLE_6LOWPAN #define MICROPY_PY_BLE_6LOWPAN (0) #endif #ifndef MICROPY_PY_BLE #define MICROPY_PY_BLE (0) #endif #ifndef MICROPY_PY_BLE_NUS #define MICROPY_PY_BLE_NUS (0) #endif #endif
// ... existing code ... #elif (BLUETOOTH_SD == 110) #define MICROPY_PY_BLE (1) #define MICROPY_PY_BLE_NUS (0) #define MICROPY_PY_UBLUEPY (1) #define MICROPY_PY_UBLUEPY_PERIPHERAL (1) #elif (BLUETOOTH_SD == 132) // ... rest of the code ...
3e1d59b91cfe84dd57558047a2d841fe5cc9bd6b
bdp/platform/frontend/setup.py
bdp/platform/frontend/setup.py
import os

from setuptools import setup, find_packages


def find_files(path):
    files = []
    for dirname, subdirnames, filenames in os.walk(path):
        for subdirname in subdirnames:
            files.extend(find_files(os.path.join(dirname, subdirname)))
        for filename in filenames:
            files.append(os.path.join(dirname, filename))
    return files

setup(
    name = "bdp_fe",
    version = "0.1.0",
    description = "Big Data Platform Frontend",
    long_description = ("This package is a web interface for the Big Data "
                        "Platform. Through this frontend, a user can lauch "
                        "Haddop jobs, read and interpret its results."),
    author = "Telefonica Digital",
    author_email = "[email protected]",
    package_dir = {'': 'src'},
    packages = find_packages('src'),
    package_data = {'': ['templates/*']},
    data_files = [('share/bdp_fe/static',
                   find_files('src/bdp_fe/jobconf/static/'))],
    install_requires = [
        'setuptools',
        'pymongo',
        'django',
        'coverage',
        'django-jenkins',
        'thrift',
        'flup',
        'MySQL-python',
    ],
    classifiers = [
        "Development Status :: 3 - Alpha",
    ],
)
import os

from setuptools import setup, find_packages


def find_files(path):
    files = []
    for dirname, subdirnames, filenames in os.walk(path):
        for subdirname in subdirnames:
            files.extend(find_files(os.path.join(dirname, subdirname)))
        for filename in filenames:
            files.append(os.path.join(dirname, filename))
    return files

setup(
    name = "bdp_fe",
    version = "0.1.0",
    description = "Big Data Platform Frontend",
    long_description = ("This package is a web interface for the Big Data "
                        "Platform. Through this frontend, a user can lauch "
                        "Haddop jobs, read and interpret its results."),
    author = "Telefonica Digital",
    author_email = "[email protected]",
    package_dir = {'': 'src'},
    packages = find_packages('src'),
    package_data = {'': ['templates/*']},
    data_files = [('share/bdp_fe/static',
                   find_files('src/bdp_fe/jobconf/static/'))],
    install_requires = [
        'setuptools',
        'pymongo',
        'django',
        'coverage',
        'pylint',
        'django-jenkins',
        'thrift',
        'flup',
        'MySQL-python',
    ],
    classifiers = [
        "Development Status :: 3 - Alpha",
    ],
)
Add pylint to installation requirements
Add pylint to installation requirements
Python
apache-2.0
telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform
python
## Code Before: import os from setuptools import setup, find_packages def find_files(path): files = [] for dirname, subdirnames, filenames in os.walk(path): for subdirname in subdirnames: files.extend(find_files(os.path.join(dirname, subdirname))) for filename in filenames: files.append(os.path.join(dirname, filename)) return files setup( name = "bdp_fe", version = "0.1.0", description = "Big Data Platform Frontend", long_description = ("This package is a web interface for the Big Data " "Platform. Through this frontend, a user can lauch " "Haddop jobs, read and interpret its results."), author = "Telefonica Digital", author_email = "[email protected]", package_dir = {'': 'src'}, packages = find_packages('src'), package_data = {'': ['templates/*']}, data_files = [('share/bdp_fe/static', find_files('src/bdp_fe/jobconf/static/'))], install_requires = [ 'setuptools', 'pymongo', 'django', 'coverage', 'django-jenkins', 'thrift', 'flup', 'MySQL-python', ], classifiers = [ "Development Status :: 3 - Alpha", ], ) ## Instruction: Add pylint to installation requirements ## Code After: import os from setuptools import setup, find_packages def find_files(path): files = [] for dirname, subdirnames, filenames in os.walk(path): for subdirname in subdirnames: files.extend(find_files(os.path.join(dirname, subdirname))) for filename in filenames: files.append(os.path.join(dirname, filename)) return files setup( name = "bdp_fe", version = "0.1.0", description = "Big Data Platform Frontend", long_description = ("This package is a web interface for the Big Data " "Platform. Through this frontend, a user can lauch " "Haddop jobs, read and interpret its results."), author = "Telefonica Digital", author_email = "[email protected]", package_dir = {'': 'src'}, packages = find_packages('src'), package_data = {'': ['templates/*']}, data_files = [('share/bdp_fe/static', find_files('src/bdp_fe/jobconf/static/'))], install_requires = [ 'setuptools', 'pymongo', 'django', 'coverage', 'pylint', 'django-jenkins', 'thrift', 'flup', 'MySQL-python', ], classifiers = [ "Development Status :: 3 - Alpha", ], )
# ... existing code ... 'pymongo', 'django', 'coverage', 'pylint', 'django-jenkins', 'thrift', 'flup', # ... rest of the code ...
7e93c8622698ad01bc4dbea44f1f80ef90415d72
core/src/main/java/eu/nerro/wolappla/model/Device.java
core/src/main/java/eu/nerro/wolappla/model/Device.java
package eu.nerro.wolappla.model;

public class Device {
    private String name;
    private String macAddress;
    private String ipAddress;
    private int port;
    private String ssid;
}
package eu.nerro.wolappla.model;

public class Device {
    private String name;
    private String macAddress;
    private String ipAddress;
    private int port;
}
Remove currently not used 'ssid' field in device model
Remove currently not used 'ssid' field in device model
Java
mit
nerro/wolappla
java
## Code Before: package eu.nerro.wolappla.model; public class Device { private String name; private String macAddress; private String ipAddress; private int port; private String ssid; } ## Instruction: Remove currently not used 'ssid' field in device model ## Code After: package eu.nerro.wolappla.model; public class Device { private String name; private String macAddress; private String ipAddress; private int port; }
... private String macAddress; private String ipAddress; private int port; } ...
5b6bfd2202b9ad7a3eafd0585ce5c37dd6f5d91f
tests/test_queue.c
tests/test_queue.c
char *test_enqueue_dequeue()
{
    int i;
    MQUE_DEFINE_STRUCT(int, 5) queue;

    MQUE_INITIALISE(&queue);

    MUNT_ASSERT(MQUE_IS_EMPTY(&queue));

    for (i = 1; i <= 5; i++) {
        MUNT_ASSERT(!MQUE_IS_FULL(&queue));
        MQUE_ENQUEUE(&queue, i);
    }

    for (i = 1; i <= 5; i++) {
        int head;
        MUNT_ASSERT(!MQUE_IS_EMPTY(&queue));
        head = MQUE_HEAD(&queue);
        MQUE_DEQUEUE(&queue);;
    }

    MUNT_ASSERT(MQUE_IS_EMPTY(&queue));

    return 0;
}
char *test_enqueue_dequeue()
{
    int i;
    MQUE_DEFINE_STRUCT(int, 5) queue;

    MQUE_INITIALISE(&queue);

    MUNT_ASSERT(MQUE_IS_EMPTY(&queue));

    for (i = 1; i <= 5; i++) {
        MUNT_ASSERT(!MQUE_IS_FULL(&queue));
        MQUE_ENQUEUE(&queue, i);
    }

    for (i = 1; i <= 5; i++) {
        int head;
        MUNT_ASSERT(!MQUE_IS_EMPTY(&queue));
        head = MQUE_HEAD(&queue);
        MQUE_DEQUEUE(&queue);;
        MUNT_ASSERT(head == i);
    }

    MUNT_ASSERT(MQUE_IS_EMPTY(&queue));

    return 0;
}
Check enqueued values in unit test
Check enqueued values in unit test
C
mit
jawebada/libmbb,jawebada/libmbb,jawebada/libmbb,jawebada/libmbb
c
## Code Before: char *test_enqueue_dequeue() { int i; MQUE_DEFINE_STRUCT(int, 5) queue; MQUE_INITIALISE(&queue); MUNT_ASSERT(MQUE_IS_EMPTY(&queue)); for (i = 1; i <= 5; i++) { MUNT_ASSERT(!MQUE_IS_FULL(&queue)); MQUE_ENQUEUE(&queue, i); } for (i = 1; i <= 5; i++) { int head; MUNT_ASSERT(!MQUE_IS_EMPTY(&queue)); head = MQUE_HEAD(&queue); MQUE_DEQUEUE(&queue);; } MUNT_ASSERT(MQUE_IS_EMPTY(&queue)); return 0; } ## Instruction: Check enqueued values in unit test ## Code After: char *test_enqueue_dequeue() { int i; MQUE_DEFINE_STRUCT(int, 5) queue; MQUE_INITIALISE(&queue); MUNT_ASSERT(MQUE_IS_EMPTY(&queue)); for (i = 1; i <= 5; i++) { MUNT_ASSERT(!MQUE_IS_FULL(&queue)); MQUE_ENQUEUE(&queue, i); } for (i = 1; i <= 5; i++) { int head; MUNT_ASSERT(!MQUE_IS_EMPTY(&queue)); head = MQUE_HEAD(&queue); MQUE_DEQUEUE(&queue);; MUNT_ASSERT(head == i); } MUNT_ASSERT(MQUE_IS_EMPTY(&queue)); return 0; }
# ... existing code ... head = MQUE_HEAD(&queue); MQUE_DEQUEUE(&queue);; MUNT_ASSERT(head == i); } MUNT_ASSERT(MQUE_IS_EMPTY(&queue)); # ... rest of the code ...
4de72b4bd349ebf16c0046c4ed9034914c03ffb5
cea/interfaces/dashboard/api/utils.py
cea/interfaces/dashboard/api/utils.py
from flask import current_app

import cea.config
import cea.inputlocator


def deconstruct_parameters(p):
    params = {'name': p.name, 'type': p.typename,
              'value': p.get(), 'help': p.help}
    if isinstance(p, cea.config.ChoiceParameter):
        params['choices'] = p._choices
    if p.typename == 'WeatherPathParameter':
        config = current_app.cea_config
        locator = cea.inputlocator.InputLocator(config.scenario)
        params['choices'] = {wn: locator.get_weather(
            wn) for wn in locator.get_weather_names()}
    elif p.typename == 'DatabasePathParameter':
        params['choices'] = p._choices
    return params
from flask import current_app

import cea.config
import cea.inputlocator


def deconstruct_parameters(p: cea.config.Parameter):
    params = {'name': p.name, 'type': p.typename, 'help': p.help}
    try:
        params["value"] = p.get()
    except cea.ConfigError as e:
        print(e)
        params["value"] = ""
    if isinstance(p, cea.config.ChoiceParameter):
        params['choices'] = p._choices
    if p.typename == 'WeatherPathParameter':
        config = current_app.cea_config
        locator = cea.inputlocator.InputLocator(config.scenario)
        params['choices'] = {wn: locator.get_weather(
            wn) for wn in locator.get_weather_names()}
    elif p.typename == 'DatabasePathParameter':
        params['choices'] = p._choices
    return params
Fix `weather_helper` bug when creating new scenario
Fix `weather_helper` bug when creating new scenario
Python
mit
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
python
## Code Before: from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params ## Instruction: Fix `weather_helper` bug when creating new scenario ## Code After: from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
// ... existing code ... import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': // ... rest of the code ...
6110bc1137f5e3f1f12249c366323c6c0b48dbe3
IPython/nbconvert/utils/base.py
IPython/nbconvert/utils/base.py
"""Global configuration class.""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import List from IPython.config.configurable import LoggingConfigurable from IPython.utils.traitlets import Unicode #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- class NbConvertBase(LoggingConfigurable): """Global configurable class for shared config Useful for display data priority that might be use by many transformers """ display_data_priority = List(['javascript', 'html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'], config=True, help= """ An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """ ) default_language = Unicode('ipython', config=True, help='default highlight language') def __init__(self, **kw): super(NbConvertBase, self).__init__(**kw)
"""Global configuration class.""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import List from IPython.config.configurable import LoggingConfigurable from IPython.utils.traitlets import Unicode #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- class NbConvertBase(LoggingConfigurable): """Global configurable class for shared config Useful for display data priority that might be use by many transformers """ display_data_priority = List(['html', 'javascript', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'], config=True, help= """ An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """ ) default_language = Unicode('ipython', config=True, help='default highlight language') def __init__(self, **kw): super(NbConvertBase, self).__init__(**kw)
Revert "Moved JS in front of HTML"
Revert "Moved JS in front of HTML" This reverts commit 8b0164edde418138d4e28c20d63fa422931ae6a8.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
python
## Code Before: """Global configuration class.""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import List from IPython.config.configurable import LoggingConfigurable from IPython.utils.traitlets import Unicode #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- class NbConvertBase(LoggingConfigurable): """Global configurable class for shared config Useful for display data priority that might be use by many transformers """ display_data_priority = List(['javascript', 'html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'], config=True, help= """ An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """ ) default_language = Unicode('ipython', config=True, help='default highlight language') def __init__(self, **kw): super(NbConvertBase, self).__init__(**kw) ## Instruction: Revert "Moved JS in front of HTML" This reverts commit 8b0164edde418138d4e28c20d63fa422931ae6a8. ## Code After: """Global configuration class.""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import List from IPython.config.configurable import LoggingConfigurable from IPython.utils.traitlets import Unicode #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- class NbConvertBase(LoggingConfigurable): """Global configurable class for shared config Useful for display data priority that might be use by many transformers """ display_data_priority = List(['html', 'javascript', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'], config=True, help= """ An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """ ) default_language = Unicode('ipython', config=True, help='default highlight language') def __init__(self, **kw): super(NbConvertBase, self).__init__(**kw)
... Useful for display data priority that might be use by many transformers """ display_data_priority = List(['html', 'javascript', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'], config=True, help= """ An ordered list of preferred output type, the first ...
e98eeadb9d5906bf65efc7a17658ae498cfcf27d
chainer/utils/__init__.py
chainer/utils/__init__.py
import contextlib
import shutil
import tempfile

import numpy

from chainer.utils import walker_alias  # NOQA

# import class and function
from chainer.utils.conv import get_conv_outsize  # NOQA
from chainer.utils.conv import get_deconv_outsize  # NOQA
from chainer.utils.experimental import experimental  # NOQA
from chainer.utils.walker_alias import WalkerAlias  # NOQA


def force_array(x, dtype=None):
    # numpy returns a float value (scalar) when a return value of an operator
    # is a 0-dimension array.
    # We need to convert such a value to a 0-dimension array because `Function`
    # object needs to return an `numpy.ndarray`.
    if numpy.isscalar(x):
        if dtype is None:
            return numpy.array(x)
        else:
            return numpy.array(x, dtype)
    else:
        if dtype is None:
            return x
        else:
            return x.astype(dtype, copy=False)


def force_type(dtype, value):
    if numpy.isscalar(value):
        return dtype.type(value)
    elif value.dtype != dtype:
        return value.astype(dtype, copy=False)
    else:
        return value


@contextlib.contextmanager
def tempdir(**kwargs):
    # A context manager that defines a lifetime of a temporary directory.
    temp_dir = tempfile.mkdtemp(**kwargs)
    try:
        yield temp_dir
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
import contextlib
import shutil
import tempfile

import numpy

from chainer.utils import walker_alias  # NOQA

# import class and function
from chainer.utils.conv import get_conv_outsize  # NOQA
from chainer.utils.conv import get_deconv_outsize  # NOQA
from chainer.utils.experimental import experimental  # NOQA
from chainer.utils.walker_alias import WalkerAlias  # NOQA


def force_array(x, dtype=None):
    # numpy returns a float value (scalar) when a return value of an operator
    # is a 0-dimension array.
    # We need to convert such a value to a 0-dimension array because `Function`
    # object needs to return an `numpy.ndarray`.
    if numpy.isscalar(x):
        if dtype is None:
            return numpy.array(x)
        else:
            return numpy.array(x, dtype)
    else:
        if dtype is None:
            return x
        else:
            return x.astype(dtype, copy=False)


def force_type(dtype, value):
    if numpy.isscalar(value):
        return dtype.type(value)
    elif value.dtype != dtype:
        return value.astype(dtype, copy=False)
    else:
        return value


@contextlib.contextmanager
def tempdir(**kwargs):
    # A context manager that defines a lifetime of a temporary directory.
    ignore_errors = kwargs.pop('ignore_errors', False)

    temp_dir = tempfile.mkdtemp(**kwargs)
    try:
        yield temp_dir
    finally:
        shutil.rmtree(temp_dir, ignore_errors=ignore_errors)
Make ignore_errors False by default
Make ignore_errors False by default
Python
mit
ronekko/chainer,chainer/chainer,okuta/chainer,wkentaro/chainer,ktnyt/chainer,chainer/chainer,okuta/chainer,niboshi/chainer,chainer/chainer,hvy/chainer,chainer/chainer,rezoo/chainer,keisuke-umezawa/chainer,anaruse/chainer,okuta/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,jnishi/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer,jnishi/chainer,pfnet/chainer,tkerola/chainer,hvy/chainer,jnishi/chainer,wkentaro/chainer,okuta/chainer,niboshi/chainer,keisuke-umezawa/chainer,ktnyt/chainer,jnishi/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,ktnyt/chainer
python
## Code Before: import contextlib import shutil import tempfile import numpy from chainer.utils import walker_alias # NOQA # import class and function from chainer.utils.conv import get_conv_outsize # NOQA from chainer.utils.conv import get_deconv_outsize # NOQA from chainer.utils.experimental import experimental # NOQA from chainer.utils.walker_alias import WalkerAlias # NOQA def force_array(x, dtype=None): # numpy returns a float value (scalar) when a return value of an operator # is a 0-dimension array. # We need to convert such a value to a 0-dimension array because `Function` # object needs to return an `numpy.ndarray`. if numpy.isscalar(x): if dtype is None: return numpy.array(x) else: return numpy.array(x, dtype) else: if dtype is None: return x else: return x.astype(dtype, copy=False) def force_type(dtype, value): if numpy.isscalar(value): return dtype.type(value) elif value.dtype != dtype: return value.astype(dtype, copy=False) else: return value @contextlib.contextmanager def tempdir(**kwargs): # A context manager that defines a lifetime of a temporary directory. temp_dir = tempfile.mkdtemp(**kwargs) try: yield temp_dir finally: shutil.rmtree(temp_dir, ignore_errors=True) ## Instruction: Make ignore_errors False by default ## Code After: import contextlib import shutil import tempfile import numpy from chainer.utils import walker_alias # NOQA # import class and function from chainer.utils.conv import get_conv_outsize # NOQA from chainer.utils.conv import get_deconv_outsize # NOQA from chainer.utils.experimental import experimental # NOQA from chainer.utils.walker_alias import WalkerAlias # NOQA def force_array(x, dtype=None): # numpy returns a float value (scalar) when a return value of an operator # is a 0-dimension array. # We need to convert such a value to a 0-dimension array because `Function` # object needs to return an `numpy.ndarray`. if numpy.isscalar(x): if dtype is None: return numpy.array(x) else: return numpy.array(x, dtype) else: if dtype is None: return x else: return x.astype(dtype, copy=False) def force_type(dtype, value): if numpy.isscalar(value): return dtype.type(value) elif value.dtype != dtype: return value.astype(dtype, copy=False) else: return value @contextlib.contextmanager def tempdir(**kwargs): # A context manager that defines a lifetime of a temporary directory. ignore_errors = kwargs.pop('ignore_errors', False) temp_dir = tempfile.mkdtemp(**kwargs) try: yield temp_dir finally: shutil.rmtree(temp_dir, ignore_errors=ignore_errors)
... @contextlib.contextmanager def tempdir(**kwargs): # A context manager that defines a lifetime of a temporary directory. ignore_errors = kwargs.pop('ignore_errors', False) temp_dir = tempfile.mkdtemp(**kwargs) try: yield temp_dir finally: shutil.rmtree(temp_dir, ignore_errors=ignore_errors) ...
8e3f0b516896cd8148409d713a8cbe4cd2bddacd
bridgesample/src/main/java/com/livefront/bridgesample/app/BridgeSampleApplication.kt
bridgesample/src/main/java/com/livefront/bridgesample/app/BridgeSampleApplication.kt
package com.livefront.bridgesample.app

import android.app.Application
import android.os.Bundle
import com.evernote.android.state.StateSaver
import com.livefront.bridge.Bridge
import com.livefront.bridge.SavedStateHandler

class BridgeSampleApplication : Application() {

    override fun onCreate() {
        super.onCreate()
        Bridge.initialize(
                this,
                object : SavedStateHandler {
                    override fun saveInstanceState(target: Any, state: Bundle) {
                        StateSaver.saveInstanceState(target, state)
                    }

                    override fun restoreInstanceState(target: Any, state: Bundle?) {
                        StateSaver.restoreInstanceState(target, state)
                    }
                }
        )
    }
}
package com.livefront.bridgesample.app

import android.app.Application
import android.os.Bundle
import android.os.Parcelable
import android.view.View
import com.evernote.android.state.StateSaver
import com.livefront.bridge.Bridge
import com.livefront.bridge.SavedStateHandler
import com.livefront.bridge.ViewSavedStateHandler

class BridgeSampleApplication : Application() {

    override fun onCreate() {
        super.onCreate()
        Bridge.initialize(
                this,
                object : SavedStateHandler {
                    override fun saveInstanceState(target: Any, state: Bundle) {
                        StateSaver.saveInstanceState(target, state)
                    }

                    override fun restoreInstanceState(target: Any, state: Bundle?) {
                        StateSaver.restoreInstanceState(target, state)
                    }
                },
                object : ViewSavedStateHandler {
                    override fun <T : View> saveInstanceState(
                        target: T,
                        parentState: Parcelable?
                    ): Parcelable = StateSaver.saveInstanceState(target, parentState)

                    override fun <T : View> restoreInstanceState(
                        target: T,
                        state: Parcelable?
                    ): Parcelable? = StateSaver.restoreInstanceState(target, state)
                }
        )
    }
}
Allow the sample app to save the state of Views
Allow the sample app to save the state of Views
Kotlin
apache-2.0
livefront/bridge,livefront/bridge
kotlin
## Code Before: package com.livefront.bridgesample.app import android.app.Application import android.os.Bundle import com.evernote.android.state.StateSaver import com.livefront.bridge.Bridge import com.livefront.bridge.SavedStateHandler class BridgeSampleApplication : Application() { override fun onCreate() { super.onCreate() Bridge.initialize( this, object : SavedStateHandler { override fun saveInstanceState(target: Any, state: Bundle) { StateSaver.saveInstanceState(target, state) } override fun restoreInstanceState(target: Any, state: Bundle?) { StateSaver.restoreInstanceState(target, state) } } ) } } ## Instruction: Allow the sample app to save the state of Views ## Code After: package com.livefront.bridgesample.app import android.app.Application import android.os.Bundle import android.os.Parcelable import android.view.View import com.evernote.android.state.StateSaver import com.livefront.bridge.Bridge import com.livefront.bridge.SavedStateHandler import com.livefront.bridge.ViewSavedStateHandler class BridgeSampleApplication : Application() { override fun onCreate() { super.onCreate() Bridge.initialize( this, object : SavedStateHandler { override fun saveInstanceState(target: Any, state: Bundle) { StateSaver.saveInstanceState(target, state) } override fun restoreInstanceState(target: Any, state: Bundle?) { StateSaver.restoreInstanceState(target, state) } }, object : ViewSavedStateHandler { override fun <T : View> saveInstanceState( target: T, parentState: Parcelable? ): Parcelable = StateSaver.saveInstanceState(target, parentState) override fun <T : View> restoreInstanceState( target: T, state: Parcelable? ): Parcelable? = StateSaver.restoreInstanceState(target, state) } ) } }
... import android.app.Application import android.os.Bundle import android.os.Parcelable import android.view.View import com.evernote.android.state.StateSaver import com.livefront.bridge.Bridge import com.livefront.bridge.SavedStateHandler import com.livefront.bridge.ViewSavedStateHandler class BridgeSampleApplication : Application() { override fun onCreate() { ... override fun restoreInstanceState(target: Any, state: Bundle?) { StateSaver.restoreInstanceState(target, state) } }, object : ViewSavedStateHandler { override fun <T : View> saveInstanceState( target: T, parentState: Parcelable? ): Parcelable = StateSaver.saveInstanceState(target, parentState) override fun <T : View> restoreInstanceState( target: T, state: Parcelable? ): Parcelable? = StateSaver.restoreInstanceState(target, state) } ) } ...
d1ff3bb3ebf0b9615c120918dc2ab4de3912c789
aeron-system-tests/src/test/java/io/aeron/log/EventLogExtension.java
aeron-system-tests/src/test/java/io/aeron/log/EventLogExtension.java
/* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.log; import io.aeron.test.Tests; import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; /** * JUnit extension to start and reset the CollectingEventLogReaderAgent. */ public class EventLogExtension implements BeforeEachCallback, AfterEachCallback { /** * {@inheritDoc} */ public void beforeEach(final ExtensionContext context) { final String className = context.getTestClass().map(Class::getSimpleName).orElse("<UNKNOWN>"); final String methodName = context.getTestClass().map(Class::getSimpleName).orElse(context.getDisplayName()); Tests.startLogCollecting("TEST: " + className + "." + methodName); } /** * {@inheritDoc} */ public void afterEach(final ExtensionContext context) { Tests.resetLogCollecting(); } }
/* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.log; import io.aeron.test.Tests; import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; import java.lang.reflect.Method; /** * JUnit extension to start and reset the CollectingEventLogReaderAgent. */ public class EventLogExtension implements BeforeEachCallback, AfterEachCallback { /** * {@inheritDoc} */ public void beforeEach(final ExtensionContext context) { final String className = context.getTestClass().map(Class::getSimpleName).orElse("<UNKNOWN>"); final String methodName = context.getTestMethod().map(Method::getName).orElse(context.getDisplayName()); Tests.startLogCollecting("TEST: " + className + "." + methodName); } /** * {@inheritDoc} */ public void afterEach(final ExtensionContext context) { Tests.resetLogCollecting(); } }
Fix name recorded in test event log.
[Java] Fix name recorded in test event log.
Java
apache-2.0
mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron
java
## Code Before: /* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.log; import io.aeron.test.Tests; import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; /** * JUnit extension to start and reset the CollectingEventLogReaderAgent. */ public class EventLogExtension implements BeforeEachCallback, AfterEachCallback { /** * {@inheritDoc} */ public void beforeEach(final ExtensionContext context) { final String className = context.getTestClass().map(Class::getSimpleName).orElse("<UNKNOWN>"); final String methodName = context.getTestClass().map(Class::getSimpleName).orElse(context.getDisplayName()); Tests.startLogCollecting("TEST: " + className + "." + methodName); } /** * {@inheritDoc} */ public void afterEach(final ExtensionContext context) { Tests.resetLogCollecting(); } } ## Instruction: [Java] Fix name recorded in test event log. ## Code After: /* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.log; import io.aeron.test.Tests; import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; import java.lang.reflect.Method; /** * JUnit extension to start and reset the CollectingEventLogReaderAgent. */ public class EventLogExtension implements BeforeEachCallback, AfterEachCallback { /** * {@inheritDoc} */ public void beforeEach(final ExtensionContext context) { final String className = context.getTestClass().map(Class::getSimpleName).orElse("<UNKNOWN>"); final String methodName = context.getTestMethod().map(Method::getName).orElse(context.getDisplayName()); Tests.startLogCollecting("TEST: " + className + "." + methodName); } /** * {@inheritDoc} */ public void afterEach(final ExtensionContext context) { Tests.resetLogCollecting(); } }
... import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; import java.lang.reflect.Method; /** * JUnit extension to start and reset the CollectingEventLogReaderAgent. */ ... public void beforeEach(final ExtensionContext context) { final String className = context.getTestClass().map(Class::getSimpleName).orElse("<UNKNOWN>"); final String methodName = context.getTestMethod().map(Method::getName).orElse(context.getDisplayName()); Tests.startLogCollecting("TEST: " + className + "." + methodName); } ...
da10b6baa19c1ef3a5f875297187e7248b7460b1
setup.py
setup.py
from setuptools import setup, find_packages import sys long_description = '' if 'upload' in sys.argv: with open('README.rst') as f: long_description = f.read() def extras_require(): return { 'test': [ 'tox>=2.0', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def install_requires(): requires = ['six'] if sys.version_info[:2] < (3, 5): requires.append("typing>=3.5.2") if sys.version_info[0] == 2: requires.append("funcsigs>=1.0.2") return requires setup( name='python-interface', version='1.4.0', description="Pythonic Interface definitions", author="Scott Sanderson", author_email="[email protected]", packages=find_packages(), long_description=long_description, license='Apache 2.0', classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Pre-processors', ], url='https://github.com/ssanderson/interface', install_requires=install_requires(), extras_require=extras_require(), )
from setuptools import setup, find_packages import sys long_description = '' if 'upload' in sys.argv: with open('README.rst') as f: long_description = f.read() def extras_require(): return { 'test': [ 'tox>=2.0', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def install_requires(): return [ 'six', 'typing>=3.5.2;python_version<"3.5"', 'funcsigs>=1.0.2;python_version<"3"' ] setup( name='python-interface', version='1.4.0', description="Pythonic Interface definitions", author="Scott Sanderson", author_email="[email protected]", packages=find_packages(), long_description=long_description, license='Apache 2.0', classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Pre-processors', ], url='https://github.com/ssanderson/interface', install_requires=install_requires(), extras_require=extras_require(), )
Use PEP 508 version markers.
BLD: Use PEP 508 version markers. So that environment tooling, e.g. `pipenv` can use the python version markers when determining dependencies.
Python
apache-2.0
ssanderson/interface
python
## Code Before: from setuptools import setup, find_packages import sys long_description = '' if 'upload' in sys.argv: with open('README.rst') as f: long_description = f.read() def extras_require(): return { 'test': [ 'tox>=2.0', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def install_requires(): requires = ['six'] if sys.version_info[:2] < (3, 5): requires.append("typing>=3.5.2") if sys.version_info[0] == 2: requires.append("funcsigs>=1.0.2") return requires setup( name='python-interface', version='1.4.0', description="Pythonic Interface definitions", author="Scott Sanderson", author_email="[email protected]", packages=find_packages(), long_description=long_description, license='Apache 2.0', classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Pre-processors', ], url='https://github.com/ssanderson/interface', install_requires=install_requires(), extras_require=extras_require(), ) ## Instruction: BLD: Use PEP 508 version markers. So that environment tooling, e.g. `pipenv` can use the python version markers when determining dependencies. ## Code After: from setuptools import setup, find_packages import sys long_description = '' if 'upload' in sys.argv: with open('README.rst') as f: long_description = f.read() def extras_require(): return { 'test': [ 'tox>=2.0', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def install_requires(): return [ 'six', 'typing>=3.5.2;python_version<"3.5"', 'funcsigs>=1.0.2;python_version<"3"' ] setup( name='python-interface', version='1.4.0', description="Pythonic Interface definitions", author="Scott Sanderson", author_email="[email protected]", packages=find_packages(), long_description=long_description, license='Apache 2.0', classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Pre-processors', ], url='https://github.com/ssanderson/interface', install_requires=install_requires(), extras_require=extras_require(), )
# ... existing code ... def install_requires(): return [ 'six', 'typing>=3.5.2;python_version<"3.5"', 'funcsigs>=1.0.2;python_version<"3"' ] setup( # ... rest of the code ...
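The install_requires entries above rely on PEP 508 environment markers: the part after the `;` is evaluated against the installing interpreter, so pip, pipenv and similar tools can decide per environment whether a dependency applies, instead of setup.py deciding at build time. A minimal sketch of that evaluation, using the third-party `packaging` library purely for illustration (the commit itself does not depend on it):

# Illustrative only: how a PEP 508 requirement string with a marker is parsed
# and evaluated. Requires the `packaging` library (pip vendors a copy of it).
from packaging.markers import Marker
from packaging.requirements import Requirement

req = Requirement('typing>=3.5.2; python_version < "3.5"')
print(req.name)       # typing
print(req.marker)     # python_version < "3.5"

# Evaluate the marker against the current interpreter...
print(req.marker.evaluate())

# ...or against an explicit environment, e.g. a Python 2.7 target.
print(Marker('python_version < "3"').evaluate({"python_version": "2.7"}))  # True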
1dbc30202bddfd4f03bdc9a8005de3c363d2ac1d
blazar/plugins/dummy_vm_plugin.py
blazar/plugins/dummy_vm_plugin.py
from blazar.plugins import base class DummyVMPlugin(base.BasePlugin): """Plugin for VM resource that does nothing.""" resource_type = 'virtual:instance' title = 'Dummy VM Plugin' description = 'This plugin does nothing.' def reserve_resource(self, reservation_id, values): return None def on_start(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be waked up this moment.' % resource_id def on_end(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be deleted this moment.' % resource_id
from blazar.plugins import base class DummyVMPlugin(base.BasePlugin): """Plugin for VM resource that does nothing.""" resource_type = 'virtual:instance' title = 'Dummy VM Plugin' description = 'This plugin does nothing.' def reserve_resource(self, reservation_id, values): return None def update_reservation(self, reservation_id, values): return None def on_start(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be waked up this moment.' % resource_id def on_end(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be deleted this moment.' % resource_id
Add update_reservation to dummy plugin
Add update_reservation to dummy plugin. update_reservation is now an abstract method. It needs to be added to all plugins. Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7
Python
apache-2.0
ChameleonCloud/blazar,ChameleonCloud/blazar,openstack/blazar,stackforge/blazar,stackforge/blazar,openstack/blazar
python
## Code Before: from blazar.plugins import base class DummyVMPlugin(base.BasePlugin): """Plugin for VM resource that does nothing.""" resource_type = 'virtual:instance' title = 'Dummy VM Plugin' description = 'This plugin does nothing.' def reserve_resource(self, reservation_id, values): return None def on_start(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be waked up this moment.' % resource_id def on_end(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be deleted this moment.' % resource_id ## Instruction: Add update_reservation to dummy plugin update_reservation is now an abstract method. It needs to be added to all plugins. Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7 ## Code After: from blazar.plugins import base class DummyVMPlugin(base.BasePlugin): """Plugin for VM resource that does nothing.""" resource_type = 'virtual:instance' title = 'Dummy VM Plugin' description = 'This plugin does nothing.' def reserve_resource(self, reservation_id, values): return None def update_reservation(self, reservation_id, values): return None def on_start(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be waked up this moment.' % resource_id def on_end(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be deleted this moment.' % resource_id
... def reserve_resource(self, reservation_id, values): return None def update_reservation(self, reservation_id, values): return None def on_start(self, resource_id): """Dummy VM plugin does nothing.""" return 'VM %s should be waked up this moment.' % resource_id ...
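The commit message above notes that update_reservation is now an abstract method, which is why even a do-nothing plugin must define it. The contract works roughly like the sketch below; the class names are illustrative, not blazar's actual base class.

# Illustrative sketch of how an abstract method forces every plugin to
# implement it, even with a no-op body. Not blazar's real BasePlugin.
import abc


class BasePlugin(abc.ABC):

    @abc.abstractmethod
    def reserve_resource(self, reservation_id, values):
        """Create a reservation for this resource type."""

    @abc.abstractmethod
    def update_reservation(self, reservation_id, values):
        """Update an existing reservation."""


class DummyPlugin(BasePlugin):

    def reserve_resource(self, reservation_id, values):
        return None

    def update_reservation(self, reservation_id, values):
        return None


DummyPlugin()  # OK: every abstract method has an implementation


class IncompletePlugin(BasePlugin):

    def reserve_resource(self, reservation_id, values):
        return None


# IncompletePlugin() raises TypeError: Can't instantiate abstract class
# IncompletePlugin with abstract method update_reservation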
8416a3ed1a6af2d0037f77744d809441591086cd
mrp_bom_location/models/mrp_bom.py
mrp_bom_location/models/mrp_bom.py
from odoo import fields, models class MrpBom(models.Model): _inherit = "mrp.bom" location_id = fields.Many2one( related='picking_type_id.default_location_dest_id', store=True, ) class MrpBomLine(models.Model): _inherit = "mrp.bom.line" location_id = fields.Many2one( related='bom_id.picking_type_id.default_location_src_id', store=True, )
from odoo import fields, models class MrpBom(models.Model): _inherit = "mrp.bom" location_id = fields.Many2one( related='picking_type_id.default_location_dest_id', readonly=True, store=True, ) class MrpBomLine(models.Model): _inherit = "mrp.bom.line" location_id = fields.Many2one( related='bom_id.picking_type_id.default_location_src_id', readonly=True, store=True, )
Make the related location readonly
[IMP] Make the related location readonly
Python
agpl-3.0
OCA/manufacture,OCA/manufacture
python
## Code Before: from odoo import fields, models class MrpBom(models.Model): _inherit = "mrp.bom" location_id = fields.Many2one( related='picking_type_id.default_location_dest_id', store=True, ) class MrpBomLine(models.Model): _inherit = "mrp.bom.line" location_id = fields.Many2one( related='bom_id.picking_type_id.default_location_src_id', store=True, ) ## Instruction: [IMP] Make the related location readonly ## Code After: from odoo import fields, models class MrpBom(models.Model): _inherit = "mrp.bom" location_id = fields.Many2one( related='picking_type_id.default_location_dest_id', readonly=True, store=True, ) class MrpBomLine(models.Model): _inherit = "mrp.bom.line" location_id = fields.Many2one( related='bom_id.picking_type_id.default_location_src_id', readonly=True, store=True, )
# ... existing code ... location_id = fields.Many2one( related='picking_type_id.default_location_dest_id', readonly=True, store=True, ) # ... modified code ... location_id = fields.Many2one( related='bom_id.picking_type_id.default_location_src_id', readonly=True, store=True, ) # ... rest of the code ...
af6f48dba7860bb4c6d77d784e4ccff03692426c
src/test/java/info/u_team/u_team_test/init/TestPotions.java
src/test/java/info/u_team/u_team_test/init/TestPotions.java
package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.BaseRegistryUtil; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.potion.RadiationPotion; import net.minecraft.potion.Potion; import net.minecraftforge.event.RegistryEvent.Register; import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus; @EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD) public class TestPotions { public static final Potion RADIATION = new RadiationPotion("radiation", 1200, 0); public static final Potion RADIATION_LONG = new RadiationPotion("radiation_long", 2400, 1); public static final Potion RADIATION_EXTREME = new RadiationPotion("radiation_extreme", 1200, 2); @SubscribeEvent public static void register(Register<Potion> event) { BaseRegistryUtil.getAllRegistryEntriesAndApplyNames(TestMod.MODID, Potion.class).forEach(event.getRegistry()::register); } }
package info.u_team.u_team_test.init; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.potion.RadiationPotion; import net.minecraft.potion.Potion; import net.minecraftforge.eventbus.api.IEventBus; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus; import net.minecraftforge.registries.*; @EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD) public class TestPotions { public static final DeferredRegister<Potion> POTIONS = DeferredRegister.create(ForgeRegistries.POTION_TYPES, TestMod.MODID); public static final Potion RADIATION = new RadiationPotion("radiation", 1200, 0); public static final Potion RADIATION_LONG = new RadiationPotion("radiation_long", 2400, 1); public static final Potion RADIATION_EXTREME = new RadiationPotion("radiation_extreme", 1200, 2); public static void register(IEventBus bus) { POTIONS.register(bus); } }
Add deferred register to potions as they crash the game because of the effects
Add deferred register to potions as they crash the game because of the effects
Java
apache-2.0
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
java
## Code Before: package info.u_team.u_team_test.init; import info.u_team.u_team_core.util.registry.BaseRegistryUtil; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.potion.RadiationPotion; import net.minecraft.potion.Potion; import net.minecraftforge.event.RegistryEvent.Register; import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus; @EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD) public class TestPotions { public static final Potion RADIATION = new RadiationPotion("radiation", 1200, 0); public static final Potion RADIATION_LONG = new RadiationPotion("radiation_long", 2400, 1); public static final Potion RADIATION_EXTREME = new RadiationPotion("radiation_extreme", 1200, 2); @SubscribeEvent public static void register(Register<Potion> event) { BaseRegistryUtil.getAllRegistryEntriesAndApplyNames(TestMod.MODID, Potion.class).forEach(event.getRegistry()::register); } } ## Instruction: Add deferred register to potions as they crash the game cuz of the effects ## Code After: package info.u_team.u_team_test.init; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.potion.RadiationPotion; import net.minecraft.potion.Potion; import net.minecraftforge.eventbus.api.IEventBus; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus; import net.minecraftforge.registries.*; @EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD) public class TestPotions { public static final DeferredRegister<Potion> POTIONS = DeferredRegister.create(ForgeRegistries.POTION_TYPES, TestMod.MODID); public static final Potion RADIATION = new RadiationPotion("radiation", 1200, 0); public static final Potion RADIATION_LONG = new RadiationPotion("radiation_long", 2400, 1); public static final Potion RADIATION_EXTREME = new RadiationPotion("radiation_extreme", 1200, 2); public static void register(IEventBus bus) { POTIONS.register(bus); } }
// ... existing code ... package info.u_team.u_team_test.init; import info.u_team.u_team_test.TestMod; import info.u_team.u_team_test.potion.RadiationPotion; import net.minecraft.potion.Potion; import net.minecraftforge.eventbus.api.IEventBus; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus; import net.minecraftforge.registries.*; @EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD) public class TestPotions { public static final DeferredRegister<Potion> POTIONS = DeferredRegister.create(ForgeRegistries.POTION_TYPES, TestMod.MODID); public static final Potion RADIATION = new RadiationPotion("radiation", 1200, 0); public static final Potion RADIATION_LONG = new RadiationPotion("radiation_long", 2400, 1); public static final Potion RADIATION_EXTREME = new RadiationPotion("radiation_extreme", 1200, 2); public static void register(IEventBus bus) { POTIONS.register(bus); } } // ... rest of the code ...
aa8820bd7b78ba5729e0a7a17e43b87bfd033980
tests/runtests.py
tests/runtests.py
import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import unittest import util_tests import jsonpickle_test import thirdparty_tests def suite(): suite = unittest.TestSuite() suite.addTest(util_tests.suite()) suite.addTest(jsonpickle_test.suite()) suite.addTest(thirdparty_tests.suite()) return suite def main(): #unittest.main(defaultTest='suite') unittest.TextTestRunner(verbosity=2).run(suite()) if __name__ == '__main__': main()
import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import unittest import util_tests import jsonpickle_test import thirdparty_tests def suite(): suite = unittest.TestSuite() suite.addTest(util_tests.suite()) suite.addTest(jsonpickle_test.suite()) suite.addTest(thirdparty_tests.suite()) return suite def main(): #unittest.main(defaultTest='suite') return unittest.TextTestRunner(verbosity=2).run(suite()) if __name__ == '__main__': sys.exit(not main().wasSuccessful())
Return correct status code to shell when tests fail.
Return correct status code to shell when tests fail. When tests fail (due to e.g. missing feedparser), then the exit code of tests/runtests.py is 0, which is treated by shell as success. Patch by Arfrever Frehtes Taifersar Arahesis.
Python
bsd-3-clause
mandx/jsonpickle,dongguangming/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle
python
## Code Before: import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import unittest import util_tests import jsonpickle_test import thirdparty_tests def suite(): suite = unittest.TestSuite() suite.addTest(util_tests.suite()) suite.addTest(jsonpickle_test.suite()) suite.addTest(thirdparty_tests.suite()) return suite def main(): #unittest.main(defaultTest='suite') unittest.TextTestRunner(verbosity=2).run(suite()) if __name__ == '__main__': main() ## Instruction: Return correct status code to shell when tests fail. When tests fail (due to e.g. missing feedparser), then the exit code of tests/runtests.py is 0, which is treated by shell as success. Patch by Arfrever Frehtes Taifersar Arahesis. ## Code After: import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import unittest import util_tests import jsonpickle_test import thirdparty_tests def suite(): suite = unittest.TestSuite() suite.addTest(util_tests.suite()) suite.addTest(jsonpickle_test.suite()) suite.addTest(thirdparty_tests.suite()) return suite def main(): #unittest.main(defaultTest='suite') return unittest.TextTestRunner(verbosity=2).run(suite()) if __name__ == '__main__': sys.exit(not main().wasSuccessful())
// ... existing code ... def main(): #unittest.main(defaultTest='suite') return unittest.TextTestRunner(verbosity=2).run(suite()) if __name__ == '__main__': sys.exit(not main().wasSuccessful()) // ... rest of the code ...
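The pattern above works because TextTestRunner.run() returns a TestResult, and sys.exit() turns its argument into the process exit status the shell inspects via $?. A small self-contained illustration:

# Self-contained illustration of the exit-status pattern used above.
import sys
import unittest


class Example(unittest.TestCase):

    def test_fails(self):
        self.assertTrue(False)


suite = unittest.TestLoader().loadTestsFromTestCase(Example)
result = unittest.TextTestRunner(verbosity=0).run(suite)

print(result.wasSuccessful())          # False: the run had a failure
# not False is True, and sys.exit(True) exits with status 1, so the shell
# (and CI) sees a failing test run as a non-zero exit code.
sys.exit(not result.wasSuccessful())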
3c34128dec117065d5507da924b24153f9002c39
java/tests/PriorityQueueTest.java
java/tests/PriorityQueueTest.java
import datastructures.PriorityQueue; import java.util.*; public class PriorityQueueTest { public static void main(String[] args) throws Exception { for (int i = 0; i < 5; i ++) { testRandomSort(10000 + ((int) (Math.random() * 10000))); } } private static void testRandomSort(int count) throws Exception { System.out.printf("\tRandomSort (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } int prev = q.poll(); while (!q.isEmpty()) { int curr = q.poll(); if (curr < prev) { throw new Exception("Incorrect poll order"); } prev = curr; } } }
import datastructures.PriorityQueue; import java.util.*; public class PriorityQueueTest { public static void main(String[] args) throws Exception { for (int i = 0; i < 5; i ++) { testRandomSort(10000 + ((int) (Math.random() * 10000))); testRandomSortAndInsert(10000 + ((int) (Math.random() * 10000))); } } private static void testRandomSort(int count) throws Exception { System.out.printf("\tRandomSort (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } int prev = q.poll(); while (!q.isEmpty()) { int curr = q.poll(); if (curr < prev) throw new Exception("Incorrect poll order"); prev = curr; } } private static void testRandomSortAndInsert(int count) throws Exception { System.out.printf("\tRandomSortAndInsert (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } while (!q.isEmpty()) { int curr = q.poll(); if (q.isEmpty()) break; int next = q.poll(); if (curr > next) throw new Exception("Incorrect poll order"); if (Math.random() < 0.5) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } } } }
Add sort and insert heap test
Add sort and insert heap test
Java
mit
vinnyoodles/algorithms,vinnyoodles/algorithms,vinnyoodles/algorithms
java
## Code Before: import datastructures.PriorityQueue; import java.util.*; public class PriorityQueueTest { public static void main(String[] args) throws Exception { for (int i = 0; i < 5; i ++) { testRandomSort(10000 + ((int) (Math.random() * 10000))); } } private static void testRandomSort(int count) throws Exception { System.out.printf("\tRandomSort (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } int prev = q.poll(); while (!q.isEmpty()) { int curr = q.poll(); if (curr < prev) { throw new Exception("Incorrect poll order"); } prev = curr; } } } ## Instruction: Add sort and insert heap test ## Code After: import datastructures.PriorityQueue; import java.util.*; public class PriorityQueueTest { public static void main(String[] args) throws Exception { for (int i = 0; i < 5; i ++) { testRandomSort(10000 + ((int) (Math.random() * 10000))); testRandomSortAndInsert(10000 + ((int) (Math.random() * 10000))); } } private static void testRandomSort(int count) throws Exception { System.out.printf("\tRandomSort (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } int prev = q.poll(); while (!q.isEmpty()) { int curr = q.poll(); if (curr < prev) throw new Exception("Incorrect poll order"); prev = curr; } } private static void testRandomSortAndInsert(int count) throws Exception { System.out.printf("\tRandomSortAndInsert (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } while (!q.isEmpty()) { int curr = q.poll(); if (q.isEmpty()) break; int next = q.poll(); if (curr > next) throw new Exception("Incorrect poll order"); if (Math.random() < 0.5) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } } } }
... public static void main(String[] args) throws Exception { for (int i = 0; i < 5; i ++) { testRandomSort(10000 + ((int) (Math.random() * 10000))); testRandomSortAndInsert(10000 + ((int) (Math.random() * 10000))); } } ... while (!q.isEmpty()) { int curr = q.poll(); if (curr < prev) throw new Exception("Incorrect poll order"); prev = curr; } } private static void testRandomSortAndInsert(int count) throws Exception { System.out.printf("\tRandomSortAndInsert (n = %d)\n", count); PriorityQueue<Integer> q = new PriorityQueue<>(); for (int i = 0; i < count; i ++) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } if (q.isEmpty()) { throw new Exception("Queue is empty"); } if (q.size() != count) { throw new Exception("Queue size is incorrect"); } while (!q.isEmpty()) { int curr = q.poll(); if (q.isEmpty()) break; int next = q.poll(); if (curr > next) throw new Exception("Incorrect poll order"); if (Math.random() < 0.5) { q.add((int) (Math.random() * Integer.MAX_VALUE)); } } } } ...
faf77acc7ddb6a5e2bc198fcfec129f83d2a7678
plotly/tests/test_core/test_file/test_file.py
plotly/tests/test_core/test_file/test_file.py
from nose.tools import raises from nose import with_setup import random import string import requests import plotly.plotly as py import plotly.tools as tls from plotly.exceptions import PlotlyRequestError def _random_filename(): random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)] unique_filename = 'Valid Folder'+''.join(random_chars) return unique_filename def init(): py.sign_in('PythonTest', '9v9f20pext') @with_setup(init) def test_create_folder(): py.file_ops.mkdirs(_random_filename()) @with_setup(init) def test_create_nested_folders(): first_folder = _random_filename() nested_folder = '{0}/{1}'.format(first_folder, _random_filename()) py.file_ops.mkdirs(nested_folder) @with_setup(init) def test_duplicate_folders(): first_folder = _random_filename() py.file_ops.mkdirs(first_folder) try: py.file_ops.mkdirs(first_folder) except PlotlyRequestError as e: if e.status_code != 409: raise e
import random import string from unittest import TestCase import plotly.plotly as py from plotly.exceptions import PlotlyRequestError class FolderAPITestCase(TestCase): def setUp(self): py.sign_in('PythonTest', '9v9f20pext') def _random_filename(self): random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)] unique_filename = 'Valid Folder'+''.join(random_chars) return unique_filename def test_create_folder(self): try: py.file_ops.mkdirs(self._random_filename()) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_create_nested_folders(self): first_folder = self._random_filename() nested_folder = '{0}/{1}'.format(first_folder, self._random_filename()) try: py.file_ops.mkdirs(nested_folder) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_duplicate_folders(self): first_folder = self._random_filename() py.file_ops.mkdirs(first_folder) try: py.file_ops.mkdirs(first_folder) except PlotlyRequestError as e: self.assertTrue(400 <= e.status_code < 500) else: self.fail('Expected this to fail!')
Fix failing test and refactor to TestCase.
Fix failing test and refactor to TestCase.
Python
mit
ee-in/python-api,plotly/plotly.py,plotly/python-api,ee-in/python-api,plotly/python-api,plotly/python-api,plotly/plotly.py,plotly/plotly.py,ee-in/python-api
python
## Code Before: from nose.tools import raises from nose import with_setup import random import string import requests import plotly.plotly as py import plotly.tools as tls from plotly.exceptions import PlotlyRequestError def _random_filename(): random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)] unique_filename = 'Valid Folder'+''.join(random_chars) return unique_filename def init(): py.sign_in('PythonTest', '9v9f20pext') @with_setup(init) def test_create_folder(): py.file_ops.mkdirs(_random_filename()) @with_setup(init) def test_create_nested_folders(): first_folder = _random_filename() nested_folder = '{0}/{1}'.format(first_folder, _random_filename()) py.file_ops.mkdirs(nested_folder) @with_setup(init) def test_duplicate_folders(): first_folder = _random_filename() py.file_ops.mkdirs(first_folder) try: py.file_ops.mkdirs(first_folder) except PlotlyRequestError as e: if e.status_code != 409: raise e ## Instruction: Fix failing test and refact to TestCase. ## Code After: import random import string from unittest import TestCase import plotly.plotly as py from plotly.exceptions import PlotlyRequestError class FolderAPITestCase(TestCase): def setUp(self): py.sign_in('PythonTest', '9v9f20pext') def _random_filename(self): random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)] unique_filename = 'Valid Folder'+''.join(random_chars) return unique_filename def test_create_folder(self): try: py.file_ops.mkdirs(self._random_filename()) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_create_nested_folders(self): first_folder = self._random_filename() nested_folder = '{0}/{1}'.format(first_folder, self._random_filename()) try: py.file_ops.mkdirs(nested_folder) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_duplicate_folders(self): first_folder = self._random_filename() py.file_ops.mkdirs(first_folder) try: py.file_ops.mkdirs(first_folder) except PlotlyRequestError as e: self.assertTrue(400 <= e.status_code < 500) else: self.fail('Expected this to fail!')
# ... existing code ... import random import string from unittest import TestCase import plotly.plotly as py from plotly.exceptions import PlotlyRequestError class FolderAPITestCase(TestCase): def setUp(self): py.sign_in('PythonTest', '9v9f20pext') def _random_filename(self): random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)] unique_filename = 'Valid Folder'+''.join(random_chars) return unique_filename def test_create_folder(self): try: py.file_ops.mkdirs(self._random_filename()) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_create_nested_folders(self): first_folder = self._random_filename() nested_folder = '{0}/{1}'.format(first_folder, self._random_filename()) try: py.file_ops.mkdirs(nested_folder) except PlotlyRequestError as e: self.fail('Expected this *not* to fail! Status: {}' .format(e.status_code)) def test_duplicate_folders(self): first_folder = self._random_filename() py.file_ops.mkdirs(first_folder) try: py.file_ops.mkdirs(first_folder) except PlotlyRequestError as e: self.assertTrue(400 <= e.status_code < 500) else: self.fail('Expected this to fail!') # ... rest of the code ...
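The refactor above follows the usual nose-to-unittest migration: the @with_setup(init) hook becomes setUp() on a TestCase, and bare status checks become self.fail() / self.assertTrue(). A generic sketch of the pattern, with illustrative names rather than plotly's:

# Generic sketch of the nose -> unittest.TestCase migration pattern.
# nose style, for comparison:
#
#     from nose import with_setup
#
#     def init():
#         configure_client()
#
#     @with_setup(init)
#     def test_something():
#         assert do_call() is not None
#
# unittest style: the setup hook becomes setUp(), and assertions use the
# TestCase helpers, which give clearer failure messages.
import unittest


class SomethingTestCase(unittest.TestCase):

    def setUp(self):
        # Runs before every test_* method, replacing @with_setup(init).
        self.client_ready = True

    def test_something(self):
        self.assertTrue(self.client_ready)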
0fa0d792bfc8ea22cd807b3b822edeb67a97943a
examples/connection.py
examples/connection.py
import sys import os_client_config from examples import common from openstack import connection def make_connection(opts): occ = os_client_config.OpenStackConfig() cloud = occ.get_one_cloud(opts.cloud, opts) auth = cloud.config['auth'] if 'insecure' in cloud.config: auth['verify'] = cloud.config['insecure'] conn = connection.Connection(preference=opts.user_preferences, **auth) return conn def run_connection(opts): conn = make_connection(opts) print("Connection: %s" % conn) for flavor in conn.compute.flavors(): print(flavor.id + " " + flavor.name) return if __name__ == "__main__": opts = common.setup() sys.exit(common.main(opts, run_connection))
import sys import os_client_config from examples import common from openstack import connection def make_connection(opts): occ = os_client_config.OpenStackConfig() cloud = occ.get_one_cloud(opts.cloud, opts) opts.user_preferences.set_region(opts.user_preferences.ALL, cloud.region) auth = cloud.config['auth'] if 'insecure' in cloud.config: auth['verify'] = cloud.config['insecure'] conn = connection.Connection(preference=opts.user_preferences, **auth) return conn def run_connection(opts): conn = make_connection(opts) print("Connection: %s" % conn) for flavor in conn.compute.flavors(): print(flavor.id + " " + flavor.name) return if __name__ == "__main__": opts = common.setup() sys.exit(common.main(opts, run_connection))
Enable occ cloud region for example
Enable occ cloud region for example. Change-Id: I4f6fb7840b684e024ceca37bc5b7e2c858574665
Python
apache-2.0
dtroyer/python-openstacksdk,dudymas/python-openstacksdk,mtougeron/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,mtougeron/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk
python
## Code Before: import sys import os_client_config from examples import common from openstack import connection def make_connection(opts): occ = os_client_config.OpenStackConfig() cloud = occ.get_one_cloud(opts.cloud, opts) auth = cloud.config['auth'] if 'insecure' in cloud.config: auth['verify'] = cloud.config['insecure'] conn = connection.Connection(preference=opts.user_preferences, **auth) return conn def run_connection(opts): conn = make_connection(opts) print("Connection: %s" % conn) for flavor in conn.compute.flavors(): print(flavor.id + " " + flavor.name) return if __name__ == "__main__": opts = common.setup() sys.exit(common.main(opts, run_connection)) ## Instruction: Enable occ cloud region for example Change-Id: I4f6fb7840b684e024ceca37bc5b7e2c858574665 ## Code After: import sys import os_client_config from examples import common from openstack import connection def make_connection(opts): occ = os_client_config.OpenStackConfig() cloud = occ.get_one_cloud(opts.cloud, opts) opts.user_preferences.set_region(opts.user_preferences.ALL, cloud.region) auth = cloud.config['auth'] if 'insecure' in cloud.config: auth['verify'] = cloud.config['insecure'] conn = connection.Connection(preference=opts.user_preferences, **auth) return conn def run_connection(opts): conn = make_connection(opts) print("Connection: %s" % conn) for flavor in conn.compute.flavors(): print(flavor.id + " " + flavor.name) return if __name__ == "__main__": opts = common.setup() sys.exit(common.main(opts, run_connection))
// ... existing code ... def make_connection(opts): occ = os_client_config.OpenStackConfig() cloud = occ.get_one_cloud(opts.cloud, opts) opts.user_preferences.set_region(opts.user_preferences.ALL, cloud.region) auth = cloud.config['auth'] if 'insecure' in cloud.config: auth['verify'] = cloud.config['insecure'] // ... rest of the code ...
6451808c2dfb3d207bdd69c8aa138554f52cf5ba
python/common-child.py
python/common-child.py
import math import os import random import re import sys # See https://en.wikipedia.org/wiki/Longest_common_subsequence_problem def commonChild(s1, s2): matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1)+ 1)] for row_i in range(len(s1)): for col_i in range(len(s2)): if s1[row_i] == s2[col_i]: matrix[row_i + 1][col_i + 1] = matrix[row_i][col_i] + 1 else: matrix[row_i + 1][col_i + 1] = max(matrix[row_i+1][col_i], matrix[row_i][col_i + 1]) return matrix[len(s1)][len(s2)] if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') s1 = input() s2 = input() result = commonChild(s1, s2) fptr.write(str(result) + '\n') fptr.close()
import math import os import random import re import sys # See https://en.wikipedia.org/wiki/Longest_common_subsequence_problem # This solution creates the matrix described in "Traceback approach" def common_child(s1, s2): matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1)+ 1)] for row_i in range(len(s1)): for col_i in range(len(s2)): if s1[row_i] == s2[col_i]: matrix[row_i + 1][col_i + 1] = matrix[row_i][col_i] + 1 else: matrix[row_i + 1][col_i + 1] = max(matrix[row_i+1][col_i], matrix[row_i][col_i + 1]) return matrix[len(s1)][len(s2)] if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') s1 = input() s2 = input() result = common_child(s1, s2) fptr.write(str(result) + '\n') fptr.close()
Include dev comment on solution
Include dev comment on solution
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
python
## Code Before: import math import os import random import re import sys # See https://en.wikipedia.org/wiki/Longest_common_subsequence_problem def commonChild(s1, s2): matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1)+ 1)] for row_i in range(len(s1)): for col_i in range(len(s2)): if s1[row_i] == s2[col_i]: matrix[row_i + 1][col_i + 1] = matrix[row_i][col_i] + 1 else: matrix[row_i + 1][col_i + 1] = max(matrix[row_i+1][col_i], matrix[row_i][col_i + 1]) return matrix[len(s1)][len(s2)] if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') s1 = input() s2 = input() result = commonChild(s1, s2) fptr.write(str(result) + '\n') fptr.close() ## Instruction: Include dev comment on solution ## Code After: import math import os import random import re import sys # See https://en.wikipedia.org/wiki/Longest_common_subsequence_problem # This solution creates the matrix described in "Traceback approach" def common_child(s1, s2): matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1)+ 1)] for row_i in range(len(s1)): for col_i in range(len(s2)): if s1[row_i] == s2[col_i]: matrix[row_i + 1][col_i + 1] = matrix[row_i][col_i] + 1 else: matrix[row_i + 1][col_i + 1] = max(matrix[row_i+1][col_i], matrix[row_i][col_i + 1]) return matrix[len(s1)][len(s2)] if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') s1 = input() s2 = input() result = common_child(s1, s2) fptr.write(str(result) + '\n') fptr.close()
# ... existing code ... import sys # See https://en.wikipedia.org/wiki/Longest_common_subsequence_problem # This solution creates the matrix described in "Traceback approach" def common_child(s1, s2): matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1)+ 1)] for row_i in range(len(s1)): # ... modified code ... return matrix[len(s1)][len(s2)] if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') s1 = input() s2 = input() result = common_child(s1, s2) fptr.write(str(result) + '\n') fptr.close() # ... rest of the code ...
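The common_child function above is a textbook longest-common-subsequence length computation, so its results can be checked by hand on tiny inputs:

# Hand-checkable calls for the LCS length function defined above.
# The function body is copied here so the sketch runs on its own.
def common_child(s1, s2):
    matrix = [[0 for i in range(len(s2) + 1)] for j in range(len(s1) + 1)]
    for row_i in range(len(s1)):
        for col_i in range(len(s2)):
            if s1[row_i] == s2[col_i]:
                matrix[row_i + 1][col_i + 1] = matrix[row_i][col_i] + 1
            else:
                matrix[row_i + 1][col_i + 1] = max(matrix[row_i + 1][col_i],
                                                   matrix[row_i][col_i + 1])
    return matrix[len(s1)][len(s2)]


print(common_child("HARRY", "SALLY"))   # 2: the longest common child is "AY"
print(common_child("AA", "BB"))         # 0: no character in common
print(common_child("ABCD", "ABCD"))     # 4: identical strings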
fa4cf8c6c7a89a60c0067be53c6b0f29d3d3cde3
biosensorsdb/admin.py
biosensorsdb/admin.py
import biosensorsdb.models from django.contrib import admin class ProjectAdmin(admin.ModelAdmin): search_fields = ['team__name', 'year', 'title', 'abstract', 'inputs__name', 'outputs__name', 'tags__name'] admin.site.register(biosensorsdb.models.Team) admin.site.register(biosensorsdb.models.SensorInput) admin.site.register(biosensorsdb.models.SensorOutput) admin.site.register(biosensorsdb.models.Track) admin.site.register(biosensorsdb.models.Application) admin.site.register(biosensorsdb.models.CompetitionResult) admin.site.register(biosensorsdb.models.Project, ProjectAdmin)
import biosensorsdb.models from django.contrib import admin class ProjectAdmin(admin.ModelAdmin): search_fields = [ 'team__name', 'year', 'title', 'abstract', 'track__name', 'inputs__name', 'outputs__name', 'application__name', 'results__result', 'tags__name', ] admin.site.register(biosensorsdb.models.Team) admin.site.register(biosensorsdb.models.SensorInput) admin.site.register(biosensorsdb.models.SensorOutput) admin.site.register(biosensorsdb.models.Track) admin.site.register(biosensorsdb.models.Application) admin.site.register(biosensorsdb.models.CompetitionResult) admin.site.register(biosensorsdb.models.Project, ProjectAdmin)
Make all project fields searchable.
Make all project fields searchable.
Python
mit
jwintersinger/igembiosensors,jwintersinger/igembiosensors
python
## Code Before: import biosensorsdb.models from django.contrib import admin class ProjectAdmin(admin.ModelAdmin): search_fields = ['team__name', 'year', 'title', 'abstract', 'inputs__name', 'outputs__name', 'tags__name'] admin.site.register(biosensorsdb.models.Team) admin.site.register(biosensorsdb.models.SensorInput) admin.site.register(biosensorsdb.models.SensorOutput) admin.site.register(biosensorsdb.models.Track) admin.site.register(biosensorsdb.models.Application) admin.site.register(biosensorsdb.models.CompetitionResult) admin.site.register(biosensorsdb.models.Project, ProjectAdmin) ## Instruction: Make all project fields searchable. ## Code After: import biosensorsdb.models from django.contrib import admin class ProjectAdmin(admin.ModelAdmin): search_fields = [ 'team__name', 'year', 'title', 'abstract', 'track__name', 'inputs__name', 'outputs__name', 'application__name', 'results__result', 'tags__name', ] admin.site.register(biosensorsdb.models.Team) admin.site.register(biosensorsdb.models.SensorInput) admin.site.register(biosensorsdb.models.SensorOutput) admin.site.register(biosensorsdb.models.Track) admin.site.register(biosensorsdb.models.Application) admin.site.register(biosensorsdb.models.CompetitionResult) admin.site.register(biosensorsdb.models.Project, ProjectAdmin)
... from django.contrib import admin class ProjectAdmin(admin.ModelAdmin): search_fields = [ 'team__name', 'year', 'title', 'abstract', 'track__name', 'inputs__name', 'outputs__name', 'application__name', 'results__result', 'tags__name', ] admin.site.register(biosensorsdb.models.Team) admin.site.register(biosensorsdb.models.SensorInput) ...
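Entries such as team__name and results__result in search_fields traverse foreign keys, and the admin turns each plain entry into a case-insensitive icontains filter. Roughly, and simplified from ModelAdmin.get_search_results for a single search term:

# Simplified view of what the admin search above does for one search term.
from django.db.models import Q


def search_projects(queryset, term):
    condition = Q()
    for field in ('team__name', 'title', 'abstract', 'tags__name'):
        condition |= Q(**{field + '__icontains': term})
    # distinct() because joining across related tables can otherwise
    # return duplicate rows for the same project.
    return queryset.filter(condition).distinct()


# e.g. search_projects(Project.objects.all(), 'arsenic') matches projects
# whose team name, title, abstract or tag name contains "arsenic".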
4823b42b23580e0a294c1e4ddb3b5b62abf9c7bc
sf/mmck/parameters.py
sf/mmck/parameters.py
from sf.lib.orderedattrdict import OrderedAttrDict class Parameters(OrderedAttrDict): pass class ParameterValues(OrderedAttrDict): pass class Parameter(object): def __init__(self, default=None, label=None): self.default = default self.label = label class Integer(Parameter): def __init__(self, default=None, label=None, range=None, step=1): super(Integer, self).__init__(default, label) self.range = range self.step = step class PathList(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else [] super().__init__(default, label) class String(Parameter): def __init__(self, default=None, label=None, choices=None): super(String, self).__init__(default, label) self.choices = choices
from collections import OrderedDict from sf.lib.orderedattrdict import OrderedAttrDict class Parameters(OrderedAttrDict): pass class ParameterValues(OrderedAttrDict): pass class Parameter(object): def __init__(self, default=None, label=None): self.default = default self.label = label class Integer(Parameter): def __init__(self, default=None, label=None, range=None, step=1): super(Integer, self).__init__(default, label) self.range = range self.step = step class KeyValuePairs(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else OrderedDict() super().__init__(default, label) class PathList(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else [] super().__init__(default, label) class String(Parameter): def __init__(self, default=None, label=None, choices=None): super(String, self).__init__(default, label) self.choices = choices
Add a key/value pairs parameter type
Add a key/value pairs parameter type
Python
mit
metrasynth/solar-flares
python
## Code Before: from sf.lib.orderedattrdict import OrderedAttrDict class Parameters(OrderedAttrDict): pass class ParameterValues(OrderedAttrDict): pass class Parameter(object): def __init__(self, default=None, label=None): self.default = default self.label = label class Integer(Parameter): def __init__(self, default=None, label=None, range=None, step=1): super(Integer, self).__init__(default, label) self.range = range self.step = step class PathList(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else [] super().__init__(default, label) class String(Parameter): def __init__(self, default=None, label=None, choices=None): super(String, self).__init__(default, label) self.choices = choices ## Instruction: Add a key/value pairs parameter type ## Code After: from collections import OrderedDict from sf.lib.orderedattrdict import OrderedAttrDict class Parameters(OrderedAttrDict): pass class ParameterValues(OrderedAttrDict): pass class Parameter(object): def __init__(self, default=None, label=None): self.default = default self.label = label class Integer(Parameter): def __init__(self, default=None, label=None, range=None, step=1): super(Integer, self).__init__(default, label) self.range = range self.step = step class KeyValuePairs(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else OrderedDict() super().__init__(default, label) class PathList(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else [] super().__init__(default, label) class String(Parameter): def __init__(self, default=None, label=None, choices=None): super(String, self).__init__(default, label) self.choices = choices
# ... existing code ... from collections import OrderedDict from sf.lib.orderedattrdict import OrderedAttrDict # ... modified code ... self.step = step class KeyValuePairs(Parameter): def __init__(self, default=None, label=None): default = default if default is not None else OrderedDict() super().__init__(default, label) class PathList(Parameter): def __init__(self, default=None, label=None): # ... rest of the code ...
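Both KeyValuePairs and PathList above use the default=None plus fresh-container idiom rather than writing OrderedDict() or [] directly in the signature. The reason is that Python evaluates default argument values once, at definition time, so a mutable default would be shared by every instance:

# Why `default=None` plus a fresh container is used instead of `default=[]`.
class Broken(object):
    def __init__(self, default=[]):
        self.default = default


a, b = Broken(), Broken()
a.default.append('x')
print(b.default)        # prints ['x'] because b shares a's list


class Safe(object):
    def __init__(self, default=None):
        self.default = default if default is not None else []


c, d = Safe(), Safe()
c.default.append('x')
print(d.default)        # prints [] because each instance gets its own list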
5d78d4cc871a8ae2e99fabbb058e896ad9bf4c23
fake-koji/src/main/java/org/fakekoji/jobmanager/ManagementResult.java
fake-koji/src/main/java/org/fakekoji/jobmanager/ManagementResult.java
package org.fakekoji.jobmanager; import org.fakekoji.jobmanager.model.JobUpdateResults; import java.util.Objects; public class ManagementResult { public final Object config; public final JobUpdateResults jobUpdateResults; public ManagementResult(Object config, JobUpdateResults jobUpdateResults) { this.config = config; this.jobUpdateResults = jobUpdateResults; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ManagementResult)) return false; ManagementResult that = (ManagementResult) o; return Objects.equals(config, that.config) && Objects.equals(jobUpdateResults, that.jobUpdateResults); } @Override public int hashCode() { return Objects.hash(config, jobUpdateResults); } @Override public String toString() { return "ManagementResult{" + "config=" + config + ", jobupdateResults=" + jobUpdateResults + '}'; } }
package org.fakekoji.jobmanager; import org.fakekoji.jobmanager.model.JobUpdateResults; import java.util.Objects; public class ManagementResult <C> { public final C config; public final JobUpdateResults jobUpdateResults; public ManagementResult(C config, JobUpdateResults jobUpdateResults) { this.config = config; this.jobUpdateResults = jobUpdateResults; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ManagementResult)) return false; ManagementResult that = (ManagementResult) o; return Objects.equals(config, that.config) && Objects.equals(jobUpdateResults, that.jobUpdateResults); } @Override public int hashCode() { return Objects.hash(config, jobUpdateResults); } @Override public String toString() { return "ManagementResult{" + "config=" + config + ", jobupdateResults=" + jobUpdateResults + '}'; } }
Add generic Config type in Management Result
Add generic Config type in Management Result
Java
mit
TheIndifferent/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin,TheIndifferent/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin
java
## Code Before: package org.fakekoji.jobmanager; import org.fakekoji.jobmanager.model.JobUpdateResults; import java.util.Objects; public class ManagementResult { public final Object config; public final JobUpdateResults jobUpdateResults; public ManagementResult(Object config, JobUpdateResults jobUpdateResults) { this.config = config; this.jobUpdateResults = jobUpdateResults; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ManagementResult)) return false; ManagementResult that = (ManagementResult) o; return Objects.equals(config, that.config) && Objects.equals(jobUpdateResults, that.jobUpdateResults); } @Override public int hashCode() { return Objects.hash(config, jobUpdateResults); } @Override public String toString() { return "ManagementResult{" + "config=" + config + ", jobupdateResults=" + jobUpdateResults + '}'; } } ## Instruction: Add generic Config type in Management Result ## Code After: package org.fakekoji.jobmanager; import org.fakekoji.jobmanager.model.JobUpdateResults; import java.util.Objects; public class ManagementResult <C> { public final C config; public final JobUpdateResults jobUpdateResults; public ManagementResult(C config, JobUpdateResults jobUpdateResults) { this.config = config; this.jobUpdateResults = jobUpdateResults; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ManagementResult)) return false; ManagementResult that = (ManagementResult) o; return Objects.equals(config, that.config) && Objects.equals(jobUpdateResults, that.jobUpdateResults); } @Override public int hashCode() { return Objects.hash(config, jobUpdateResults); } @Override public String toString() { return "ManagementResult{" + "config=" + config + ", jobupdateResults=" + jobUpdateResults + '}'; } }
# ... existing code ... import java.util.Objects; public class ManagementResult <C> { public final C config; public final JobUpdateResults jobUpdateResults; public ManagementResult(C config, JobUpdateResults jobUpdateResults) { this.config = config; this.jobUpdateResults = jobUpdateResults; } # ... rest of the code ...
e4b2d60af93fd84407eb7107497b2b500d79f9d7
calexicon/dates/tests/test_distant.py
calexicon/dates/tests/test_distant.py
import unittest from datetime import date as vanilla_date, timedelta from calexicon.dates import DistantDate class TestDistantDate(unittest.TestCase): def test_subtraction(self): dd = DistantDate(10000, 1, 1) self.assertIsInstance(dd - vanilla_date(9999, 1, 1), timedelta) self.assertIsInstance(dd - timedelta(0), DistantDate) def test_subtract_correct_result(self): dd = DistantDate(10000, 1, 2) dd2 = DistantDate(10000, 1, 1) self.assertEqual(dd - dd2, timedelta(days=1)) def test_subtract_vanilla_date_from_distant_date(self): dd = DistantDate(10000, 1, 2) d = vanilla_date(9984, 2, 29) x = 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 15 * 365 + 3 + 2 self.assertEqual(dd - d, timedelta(days=x))
import unittest from datetime import date as vanilla_date, timedelta from calexicon.calendars import ProlepticJulianCalendar from calexicon.dates import DateWithCalendar, DistantDate class TestDistantDate(unittest.TestCase): def test_subtraction(self): dd = DistantDate(10000, 1, 1) self.assertIsInstance(dd - vanilla_date(9999, 1, 1), timedelta) self.assertIsInstance(dd - timedelta(0), DistantDate) def test_subtract_correct_result(self): dd = DistantDate(10000, 1, 2) dd2 = DistantDate(10000, 1, 1) self.assertEqual(dd - dd2, timedelta(days=1)) def test_subtract_vanilla_date_from_distant_date(self): dd = DistantDate(10000, 1, 2) d = vanilla_date(9984, 2, 29) x = 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 15 * 365 + 3 + 2 self.assertEqual(dd - d, timedelta(days=x)) def test_equality(self): dd = DistantDate(2010, 8, 1) ProlepticJulianCalendar().bless(dd) dwc = DateWithCalendar(ProlepticJulianCalendar, DistantDate(2010, 8, 1)) self.assertTrue(dwc == dd)
Add a passing test for equality.
Add a passing test for equality. Narrow down problems with constructing a date far in the future.
Python
apache-2.0
jwg4/qual,jwg4/calexicon
python
## Code Before: import unittest from datetime import date as vanilla_date, timedelta from calexicon.dates import DistantDate class TestDistantDate(unittest.TestCase): def test_subtraction(self): dd = DistantDate(10000, 1, 1) self.assertIsInstance(dd - vanilla_date(9999, 1, 1), timedelta) self.assertIsInstance(dd - timedelta(0), DistantDate) def test_subtract_correct_result(self): dd = DistantDate(10000, 1, 2) dd2 = DistantDate(10000, 1, 1) self.assertEqual(dd - dd2, timedelta(days=1)) def test_subtract_vanilla_date_from_distant_date(self): dd = DistantDate(10000, 1, 2) d = vanilla_date(9984, 2, 29) x = 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 15 * 365 + 3 + 2 self.assertEqual(dd - d, timedelta(days=x)) ## Instruction: Add a passing test for equality. Narrow down problems with constructing a date far in the future. ## Code After: import unittest from datetime import date as vanilla_date, timedelta from calexicon.calendars import ProlepticJulianCalendar from calexicon.dates import DateWithCalendar, DistantDate class TestDistantDate(unittest.TestCase): def test_subtraction(self): dd = DistantDate(10000, 1, 1) self.assertIsInstance(dd - vanilla_date(9999, 1, 1), timedelta) self.assertIsInstance(dd - timedelta(0), DistantDate) def test_subtract_correct_result(self): dd = DistantDate(10000, 1, 2) dd2 = DistantDate(10000, 1, 1) self.assertEqual(dd - dd2, timedelta(days=1)) def test_subtract_vanilla_date_from_distant_date(self): dd = DistantDate(10000, 1, 2) d = vanilla_date(9984, 2, 29) x = 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 15 * 365 + 3 + 2 self.assertEqual(dd - d, timedelta(days=x)) def test_equality(self): dd = DistantDate(2010, 8, 1) ProlepticJulianCalendar().bless(dd) dwc = DateWithCalendar(ProlepticJulianCalendar, DistantDate(2010, 8, 1)) self.assertTrue(dwc == dd)
// ... existing code ... from datetime import date as vanilla_date, timedelta from calexicon.calendars import ProlepticJulianCalendar from calexicon.dates import DateWithCalendar, DistantDate class TestDistantDate(unittest.TestCase): def test_subtraction(self): // ... modified code ... d = vanilla_date(9984, 2, 29) x = 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 15 * 365 + 3 + 2 self.assertEqual(dd - d, timedelta(days=x)) def test_equality(self): dd = DistantDate(2010, 8, 1) ProlepticJulianCalendar().bless(dd) dwc = DateWithCalendar(ProlepticJulianCalendar, DistantDate(2010, 8, 1)) self.assertTrue(dwc == dd) // ... rest of the code ...
429d701cce7ad2cf8fb77f169c4af6f2f27562fd
db_mutex/models.py
db_mutex/models.py
from django.db import models class DBMutex(models.Model): """ Models a mutex lock with a ``lock_id`` and a ``creation_time``. :type lock_id: str :param lock_id: A unique CharField with a max length of 256 :type creation_time: datetime :param creation_time: The creation time of the mutex lock """ lock_id = models.CharField(max_length=255, unique=True) creation_time = models.DateTimeField(auto_now_add=True)
from django.db import models class DBMutex(models.Model): """ Models a mutex lock with a ``lock_id`` and a ``creation_time``. :type lock_id: str :param lock_id: A unique CharField with a max length of 256 :type creation_time: datetime :param creation_time: The creation time of the mutex lock """ lock_id = models.CharField(max_length=255, unique=True) creation_time = models.DateTimeField(auto_now_add=True) class Meta: app_label = "db_mutex"
Declare app_label in model Meta class to work with Django 1.9
Declare app_label in model Meta class to work with Django 1.9 Fixes RemovedInDjango19Warning: Model class db_mutex.models.DBMutex doesn't declare an explicit app_label and either isn't in an application in INSTALLED_APPS or else was imported before its application was loaded. This will no longer be supported in Django 1.9.
Python
mit
minervaproject/django-db-mutex
python
## Code Before: from django.db import models class DBMutex(models.Model): """ Models a mutex lock with a ``lock_id`` and a ``creation_time``. :type lock_id: str :param lock_id: A unique CharField with a max length of 256 :type creation_time: datetime :param creation_time: The creation time of the mutex lock """ lock_id = models.CharField(max_length=255, unique=True) creation_time = models.DateTimeField(auto_now_add=True) ## Instruction: Declare app_label in model Meta class to work with Django 1.9 Fixes RemovedInDjango19Warning: Model class db_mutex.models.DBMutex doesn't declare an explicit app_label and either isn't in an application in INSTALLED_APPS or else was imported before its application was loaded. This will no longer be supported in Django 1.9. ## Code After: from django.db import models class DBMutex(models.Model): """ Models a mutex lock with a ``lock_id`` and a ``creation_time``. :type lock_id: str :param lock_id: A unique CharField with a max length of 256 :type creation_time: datetime :param creation_time: The creation time of the mutex lock """ lock_id = models.CharField(max_length=255, unique=True) creation_time = models.DateTimeField(auto_now_add=True) class Meta: app_label = "db_mutex"
// ... existing code ... """ lock_id = models.CharField(max_length=255, unique=True) creation_time = models.DateTimeField(auto_now_add=True) class Meta: app_label = "db_mutex" // ... rest of the code ...
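
The fix above is an instance of a general Django requirement: a model must name its app explicitly when it cannot rely on the normal app-loading machinery. A minimal sketch of the same pattern with invented names (a "locks" app and a ResourceLock model; it assumes a configured Django project and is not part of the db_mutex record):

from django.db import models


class ResourceLock(models.Model):
    name = models.CharField(max_length=255, unique=True)
    acquired_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Declaring app_label explicitly keeps Django 1.9+ from rejecting a
        # model that is imported before its application is fully loaded.
        app_label = "locks"
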
c1fdc0701a0eb9cfb0a6e64ca80ae5927f017df7
doc/dali-adaptor-doc.h
doc/dali-adaptor-doc.h
/** * @defgroup dali_adaptor DALi Adaptor * * @brief This module is a platform adaptation layer. It initializes and sets up DALi appropriately. * The module provides many platform-related services with its internal module, * platform abstraction. Several signals can be connected to it to keep you informed when * certain platform-related activities occur. * * @ingroup dali * @{ * @defgroup dali_adaptor_framework Adaptor Framework * @brief Classes for the adaption layer. * @} */ #endif /* __DALI_ADAPTOR_DOC_H__ */
/* * Copyright (c) 2016 Samsung Electronics Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * @defgroup dali_adaptor DALi Adaptor * * @brief DALi Adaptor is a platform adaptation layer. * * It initializes and sets up DALi appropriately and * provides many platform-related services with its internal module, * platform abstraction. Several signals can be connected to it to keep you informed when * certain platform-related activities occur. * * @section dali_adaptor_overview Overview * * DALi Adaptor consists of the following groups of API: * * <table> * <tr> * <th>API Group</th> * <th>Description</th> * </tr> * <tr> * <td>@ref dali_adaptor_framework</td> * <td>Classes for the adaption layer.</td> * </tr> * </table> * * @ingroup dali * @{ * @defgroup dali_adaptor_framework Adaptor Framework * @brief Classes for the adaption layer. * @} */ #endif /* __DALI_ADAPTOR_DOC_H__ */
Update doxygen groups and overview description
Update doxygen groups and overview description - Update overview of DALi adaptor Change-Id: Iede36ea40f2a8a85acf0fbe00ab886aaecdc0af0
C
apache-2.0
dalihub/dali-adaptor,dalihub/dali-adaptor,dalihub/dali-adaptor,dalihub/dali-adaptor,dalihub/dali-adaptor
c
## Code Before: /** * @defgroup dali_adaptor DALi Adaptor * * @brief This module is a platform adaptation layer. It initializes and sets up DALi appropriately. * The module provides many platform-related services with its internal module, * platform abstraction. Several signals can be connected to it to keep you informed when * certain platform-related activities occur. * * @ingroup dali * @{ * @defgroup dali_adaptor_framework Adaptor Framework * @brief Classes for the adaption layer. * @} */ #endif /* __DALI_ADAPTOR_DOC_H__ */ ## Instruction: Update doxygen groups and overview description - Update overview of DALi adaptor Change-Id: Iede36ea40f2a8a85acf0fbe00ab886aaecdc0af0 ## Code After: /* * Copyright (c) 2016 Samsung Electronics Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * @defgroup dali_adaptor DALi Adaptor * * @brief DALi Adaptor is a platform adaptation layer. * * It initializes and sets up DALi appropriately and * provides many platform-related services with its internal module, * platform abstraction. Several signals can be connected to it to keep you informed when * certain platform-related activities occur. * * @section dali_adaptor_overview Overview * * DALi Adaptor consists of the following groups of API: * * <table> * <tr> * <th>API Group</th> * <th>Description</th> * </tr> * <tr> * <td>@ref dali_adaptor_framework</td> * <td>Classes for the adaption layer.</td> * </tr> * </table> * * @ingroup dali * @{ * @defgroup dali_adaptor_framework Adaptor Framework * @brief Classes for the adaption layer. * @} */ #endif /* __DALI_ADAPTOR_DOC_H__ */
# ... existing code ... /* * Copyright (c) 2016 Samsung Electronics Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * @defgroup dali_adaptor DALi Adaptor * * @brief DALi Adaptor is a platform adaptation layer. * * It initializes and sets up DALi appropriately and * provides many platform-related services with its internal module, * platform abstraction. Several signals can be connected to it to keep you informed when * certain platform-related activities occur. * * @section dali_adaptor_overview Overview * * DALi Adaptor consists of the following groups of API: * * <table> * <tr> * <th>API Group</th> * <th>Description</th> * </tr> * <tr> * <td>@ref dali_adaptor_framework</td> * <td>Classes for the adaption layer.</td> * </tr> * </table> * * @ingroup dali * @{ # ... rest of the code ...
b8792d9164f669133032eb26ab78281acb17c9c5
appengine/standard/conftest.py
appengine/standard/conftest.py
import os import six # Import py.test hooks and fixtures for App Engine from gcp.testing.appengine import ( login, pytest_configure, pytest_runtest_call, run_tasks, testbed) (login) (pytest_configure) (pytest_runtest_call) (run_tasks) (testbed) def pytest_ignore_collect(path, config): """Skip App Engine tests in python 3 and if no SDK is available.""" if 'appengine/standard' in str(path): if six.PY3: return True if 'GAE_SDK_PATH' not in os.environ: return True return False
import os # Import py.test hooks and fixtures for App Engine from gcp.testing.appengine import ( login, pytest_configure, pytest_runtest_call, run_tasks, testbed) import six (login) (pytest_configure) (pytest_runtest_call) (run_tasks) (testbed) def pytest_ignore_collect(path, config): """Skip App Engine tests in python 3 or if no SDK is available.""" if 'appengine/standard' in str(path): if six.PY3: return True if 'GAE_SDK_PATH' not in os.environ: return True return False
Fix lint issue and review comments
Fix lint issue and review comments Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7
Python
apache-2.0
canglade/NLP,hashems/Mobile-Cloud-Development-Projects,sharbison3/python-docs-samples,JavaRabbit/CS496_capstone,GoogleCloudPlatform/python-docs-samples,sharbison3/python-docs-samples,sharbison3/python-docs-samples,sharbison3/python-docs-samples,hashems/Mobile-Cloud-Development-Projects,JavaRabbit/CS496_capstone,BrandonY/python-docs-samples,BrandonY/python-docs-samples,BrandonY/python-docs-samples,canglade/NLP,GoogleCloudPlatform/python-docs-samples,canglade/NLP,hashems/Mobile-Cloud-Development-Projects,BrandonY/python-docs-samples,GoogleCloudPlatform/python-docs-samples,canglade/NLP,hashems/Mobile-Cloud-Development-Projects,JavaRabbit/CS496_capstone,JavaRabbit/CS496_capstone,GoogleCloudPlatform/python-docs-samples
python
## Code Before: import os import six # Import py.test hooks and fixtures for App Engine from gcp.testing.appengine import ( login, pytest_configure, pytest_runtest_call, run_tasks, testbed) (login) (pytest_configure) (pytest_runtest_call) (run_tasks) (testbed) def pytest_ignore_collect(path, config): """Skip App Engine tests in python 3 and if no SDK is available.""" if 'appengine/standard' in str(path): if six.PY3: return True if 'GAE_SDK_PATH' not in os.environ: return True return False ## Instruction: Fix lint issue and review comments Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7 ## Code After: import os # Import py.test hooks and fixtures for App Engine from gcp.testing.appengine import ( login, pytest_configure, pytest_runtest_call, run_tasks, testbed) import six (login) (pytest_configure) (pytest_runtest_call) (run_tasks) (testbed) def pytest_ignore_collect(path, config): """Skip App Engine tests in python 3 or if no SDK is available.""" if 'appengine/standard' in str(path): if six.PY3: return True if 'GAE_SDK_PATH' not in os.environ: return True return False
# ... existing code ... import os # Import py.test hooks and fixtures for App Engine from gcp.testing.appengine import ( # ... modified code ... pytest_runtest_call, run_tasks, testbed) import six (login) (pytest_configure) ... def pytest_ignore_collect(path, config): """Skip App Engine tests in python 3 or if no SDK is available.""" if 'appengine/standard' in str(path): if six.PY3: return True # ... rest of the code ...
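
The conftest change above leans on pytest's pytest_ignore_collect hook, which may return True to skip collecting a path. A rough generic sketch of the same idea, skipping a directory whenever a required environment variable is absent (the directory and variable names are invented; the (path, config) signature follows the record, while newer pytest releases prefer a pathlib-based parameter):

# conftest.py
import os


def pytest_ignore_collect(path, config):
    """Skip tests under legacy/ when no SDK location is configured."""
    if 'legacy' in str(path) and 'LEGACY_SDK_PATH' not in os.environ:
        return True
    return False
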
bfbb685854724285bdbfcd583b3d6c41674a6222
vext/syspy/pyinfo.py
vext/syspy/pyinfo.py
import json import os import sys """ Return paths from the system python """ def py_info(): data = { "path": os.environ['PATH'], "sys.path": sys.path } return data if __name__ == '__main__': print json.dumps(py_info())
import json import os import sys """ Return paths from the system python """ def py_info(): data = { "path": os.environ['PATH'].split(os.pathsep), "sys.path": sys.path } return data if __name__ == '__main__': print json.dumps(py_info())
Make sure path is in a list
Make sure path is in a list
Python
mit
stuaxo/vext
python
## Code Before: import json import os import sys """ Return paths from the system python """ def py_info(): data = { "path": os.environ['PATH'], "sys.path": sys.path } return data if __name__ == '__main__': print json.dumps(py_info()) ## Instruction: Make sure path is in a list ## Code After: import json import os import sys """ Return paths from the system python """ def py_info(): data = { "path": os.environ['PATH'].split(os.pathsep), "sys.path": sys.path } return data if __name__ == '__main__': print json.dumps(py_info())
// ... existing code ... """ def py_info(): data = { "path": os.environ['PATH'].split(os.pathsep), "sys.path": sys.path } return data // ... rest of the code ...
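
The one-line change works because os.pathsep is the platform's PATH separator (":" on POSIX, ";" on Windows), so splitting on it yields one list entry per directory. A small standalone sketch of the same idea, written for Python 3 rather than the Python 2 style used in the record:

import json
import os


def path_entries():
    # Split PATH into a list of directories using the platform separator.
    return os.environ.get('PATH', '').split(os.pathsep)


if __name__ == '__main__':
    print(json.dumps(path_entries(), indent=2))
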
7285265a2b6a453f8c802c30eedd84b04b518576
easyfxml/src/main/java/moe/tristan/easyfxml/util/Properties.java
easyfxml/src/main/java/moe/tristan/easyfxml/util/Properties.java
package moe.tristan.easyfxml.util; import java.util.function.Consumer; import java.util.function.Supplier; import javafx.beans.property.Property; public final class Properties { private Properties() { } public static <T, P extends Property<T>> P newPropertyWithCallback(Supplier<P> propertyFactory, Consumer<T> callback) { final P property = propertyFactory.get(); whenPropertyIsSet(property, callback); return property; } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Consumer<T> doWhenSet) { property.addListener((o, prev, cur) -> doWhenSet.accept(cur)); if (property.getValue() != null) { doWhenSet.accept(property.getValue()); } } }
package moe.tristan.easyfxml.util; import java.util.function.Consumer; import java.util.function.Supplier; import javafx.beans.property.Property; public final class Properties { private Properties() { } public static <T, P extends Property<T>> P newPropertyWithCallback(Supplier<P> propertyFactory, Consumer<T> callback) { final P property = propertyFactory.get(); whenPropertyIsSet(property, callback); return property; } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Consumer<T> doWhenSet) { whenPropertyIsSet(property, () -> doWhenSet.accept(property.getValue())); } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Runnable doWhenSet) { property.addListener((o, prev, cur) -> doWhenSet.run()); if (property.getValue() != null) { doWhenSet.run(); } } }
Add basic convenience async property runnable execution
Add basic convenience async property runnable execution
Java
apache-2.0
Tristan971/EasyFXML,Tristan971/EasyFXML
java
## Code Before: package moe.tristan.easyfxml.util; import java.util.function.Consumer; import java.util.function.Supplier; import javafx.beans.property.Property; public final class Properties { private Properties() { } public static <T, P extends Property<T>> P newPropertyWithCallback(Supplier<P> propertyFactory, Consumer<T> callback) { final P property = propertyFactory.get(); whenPropertyIsSet(property, callback); return property; } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Consumer<T> doWhenSet) { property.addListener((o, prev, cur) -> doWhenSet.accept(cur)); if (property.getValue() != null) { doWhenSet.accept(property.getValue()); } } } ## Instruction: Add basic convenience async property runnable execution ## Code After: package moe.tristan.easyfxml.util; import java.util.function.Consumer; import java.util.function.Supplier; import javafx.beans.property.Property; public final class Properties { private Properties() { } public static <T, P extends Property<T>> P newPropertyWithCallback(Supplier<P> propertyFactory, Consumer<T> callback) { final P property = propertyFactory.get(); whenPropertyIsSet(property, callback); return property; } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Consumer<T> doWhenSet) { whenPropertyIsSet(property, () -> doWhenSet.accept(property.getValue())); } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Runnable doWhenSet) { property.addListener((o, prev, cur) -> doWhenSet.run()); if (property.getValue() != null) { doWhenSet.run(); } } }
... } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Consumer<T> doWhenSet) { whenPropertyIsSet(property, () -> doWhenSet.accept(property.getValue())); } public static <T, P extends Property<T>> void whenPropertyIsSet(P property, Runnable doWhenSet) { property.addListener((o, prev, cur) -> doWhenSet.run()); if (property.getValue() != null) { doWhenSet.run(); } } ...
c3a184a188d18f87bad2d7f34a2dfd3a7cca4827
signac/common/errors.py
signac/common/errors.py
class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format( self.args[0]) else: return "Failed to authenticate with host." class ExportError(Error, RuntimeError): pass class FileNotFoundError(Error, FileNotFoundError): pass class FetchError(FileNotFoundError): pass
from . import six class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format( self.args[0]) else: return "Failed to authenticate with host." class ExportError(Error, RuntimeError): pass if six.PY2: class FileNotFoundError(Error, IOError): pass else: class FileNotFoundError(Error, FileNotFoundError): pass class FetchError(FileNotFoundError): pass
Fix py27 issue in error module.
Fix py27 issue in error module. Inherit signac internal FileNotFoundError class from IOError instead of FileNotFoundError in python 2.7.
Python
bsd-3-clause
csadorf/signac,csadorf/signac
python
## Code Before: class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format( self.args[0]) else: return "Failed to authenticate with host." class ExportError(Error, RuntimeError): pass class FileNotFoundError(Error, FileNotFoundError): pass class FetchError(FileNotFoundError): pass ## Instruction: Fix py27 issue in error module. Inherit signac internal FileNotFoundError class from IOError instead of FileNotFoundError in python 2.7. ## Code After: from . import six class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format( self.args[0]) else: return "Failed to authenticate with host." class ExportError(Error, RuntimeError): pass if six.PY2: class FileNotFoundError(Error, IOError): pass else: class FileNotFoundError(Error, FileNotFoundError): pass class FetchError(FileNotFoundError): pass
... from . import six class Error(Exception): pass ... pass if six.PY2: class FileNotFoundError(Error, IOError): pass else: class FileNotFoundError(Error, FileNotFoundError): pass class FetchError(FileNotFoundError): ...
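
The conditional class definition above is a standard Python 2/3 compatibility idiom: FileNotFoundError does not exist on Python 2, where a missing file surfaces as IOError. A generic sketch of the pattern with an invented exception name, assuming the six package is available as in the record:

import six

if six.PY2:
    # Python 2 has no built-in FileNotFoundError; fall back to IOError.
    class MissingFileError(IOError):
        pass
else:
    # Python 3: subclass the built-in so callers can catch either name.
    class MissingFileError(FileNotFoundError):
        pass


def read_required(path):
    try:
        with open(path) as fh:
            return fh.read()
    except (IOError, OSError):
        raise MissingFileError(path)
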
20c8d494519b3d54bc3981aebdad18871deef3cb
src/sentry/auth/manager.py
src/sentry/auth/manager.py
from __future__ import absolute_import, print_function __all__ = ['ProviderManager'] from .exceptions import ProviderNotRegistered # Ideally this and PluginManager abstracted from the same base, but # InstanceManager has become convulated and wasteful class ProviderManager(object): def __init__(self): self.__values = {} def __iter__(self): return self.__values.iteritems() def get(self, name, **kwargs): try: cls = self.__values[name] except KeyError: raise ProviderNotRegistered(name) return cls(name=name, **kwargs) def exists(self, name): return name in self.__values def register(self, name, cls): self.__values[name] = cls def unregister(self, name, cls): if self.__values[name] != cls: raise ProviderNotRegistered(name) del self.__values[name]
from __future__ import absolute_import, print_function __all__ = ['ProviderManager'] from .exceptions import ProviderNotRegistered # Ideally this and PluginManager abstracted from the same base, but # InstanceManager has become convulated and wasteful class ProviderManager(object): def __init__(self): self.__values = {} def __iter__(self): return self.__values.iteritems() def get(self, key, **kwargs): try: cls = self.__values[key] except KeyError: raise ProviderNotRegistered(key) return cls(key=key, **kwargs) def exists(self, key): return key in self.__values def register(self, key, cls): self.__values[key] = cls def unregister(self, key, cls): if self.__values[key] != cls: raise ProviderNotRegistered(key) del self.__values[key]
Revert back to using key
Revert back to using key
Python
bsd-3-clause
argonemyth/sentry,gencer/sentry,JamesMura/sentry,vperron/sentry,nicholasserra/sentry,alexm92/sentry,jokey2k/sentry,zenefits/sentry,jokey2k/sentry,zenefits/sentry,Kryz/sentry,llonchj/sentry,llonchj/sentry,daevaorn/sentry,zenefits/sentry,ewdurbin/sentry,TedaLIEz/sentry,boneyao/sentry,nicholasserra/sentry,felixbuenemann/sentry,BuildingLink/sentry,vperron/sentry,daevaorn/sentry,mitsuhiko/sentry,looker/sentry,JTCunning/sentry,kevinlondon/sentry,looker/sentry,wujuguang/sentry,looker/sentry,ifduyue/sentry,drcapulet/sentry,mvaled/sentry,fuziontech/sentry,fotinakis/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,alexm92/sentry,fotinakis/sentry,ifduyue/sentry,JamesMura/sentry,fotinakis/sentry,kevinastone/sentry,1tush/sentry,jean/sentry,daevaorn/sentry,gencer/sentry,ngonzalvez/sentry,kevinastone/sentry,songyi199111/sentry,alexm92/sentry,hongliang5623/sentry,hongliang5623/sentry,korealerts1/sentry,BuildingLink/sentry,pauloschilling/sentry,mvaled/sentry,fuziontech/sentry,mvaled/sentry,ifduyue/sentry,BayanGroup/sentry,Natim/sentry,ifduyue/sentry,JTCunning/sentry,zenefits/sentry,songyi199111/sentry,wong2/sentry,gencer/sentry,1tush/sentry,beeftornado/sentry,JTCunning/sentry,JackDanger/sentry,imankulov/sentry,jean/sentry,daevaorn/sentry,gg7/sentry,boneyao/sentry,nicholasserra/sentry,jean/sentry,TedaLIEz/sentry,argonemyth/sentry,korealerts1/sentry,wujuguang/sentry,wong2/sentry,Natim/sentry,mvaled/sentry,felixbuenemann/sentry,gencer/sentry,BayanGroup/sentry,jokey2k/sentry,pauloschilling/sentry,gg7/sentry,TedaLIEz/sentry,mitsuhiko/sentry,imankulov/sentry,ewdurbin/sentry,BayanGroup/sentry,beeftornado/sentry,JackDanger/sentry,imankulov/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,1tush/sentry,kevinastone/sentry,fotinakis/sentry,songyi199111/sentry,BuildingLink/sentry,Natim/sentry,JamesMura/sentry,ngonzalvez/sentry,drcapulet/sentry,drcapulet/sentry,felixbuenemann/sentry,argonemyth/sentry,pauloschilling/sentry,llonchj/sentry,mvaled/sentry,jean/sentry,hongliang5623/sentry,beeftornado/sentry,jean/sentry,mvaled/sentry,JackDanger/sentry,ewdurbin/sentry,boneyao/sentry,wong2/sentry,looker/sentry,Kryz/sentry,gg7/sentry,vperron/sentry,looker/sentry,wujuguang/sentry,JamesMura/sentry,gencer/sentry,BuildingLink/sentry,kevinlondon/sentry,korealerts1/sentry,fuziontech/sentry,kevinlondon/sentry
python
## Code Before: from __future__ import absolute_import, print_function __all__ = ['ProviderManager'] from .exceptions import ProviderNotRegistered # Ideally this and PluginManager abstracted from the same base, but # InstanceManager has become convulated and wasteful class ProviderManager(object): def __init__(self): self.__values = {} def __iter__(self): return self.__values.iteritems() def get(self, name, **kwargs): try: cls = self.__values[name] except KeyError: raise ProviderNotRegistered(name) return cls(name=name, **kwargs) def exists(self, name): return name in self.__values def register(self, name, cls): self.__values[name] = cls def unregister(self, name, cls): if self.__values[name] != cls: raise ProviderNotRegistered(name) del self.__values[name] ## Instruction: Revert back to using key ## Code After: from __future__ import absolute_import, print_function __all__ = ['ProviderManager'] from .exceptions import ProviderNotRegistered # Ideally this and PluginManager abstracted from the same base, but # InstanceManager has become convulated and wasteful class ProviderManager(object): def __init__(self): self.__values = {} def __iter__(self): return self.__values.iteritems() def get(self, key, **kwargs): try: cls = self.__values[key] except KeyError: raise ProviderNotRegistered(key) return cls(key=key, **kwargs) def exists(self, key): return key in self.__values def register(self, key, cls): self.__values[key] = cls def unregister(self, key, cls): if self.__values[key] != cls: raise ProviderNotRegistered(key) del self.__values[key]
... def __iter__(self): return self.__values.iteritems() def get(self, key, **kwargs): try: cls = self.__values[key] except KeyError: raise ProviderNotRegistered(key) return cls(key=key, **kwargs) def exists(self, key): return key in self.__values def register(self, key, cls): self.__values[key] = cls def unregister(self, key, cls): if self.__values[key] != cls: raise ProviderNotRegistered(key) del self.__values[key] ...
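
The manager above is a small key-based registry: classes are stored under a string key and instantiated on demand with that key handed back to the constructor. A self-contained sketch of how such a registry is typically used (the Registry and DummyProvider names are invented for illustration and are not Sentry's API):

class DummyProvider(object):
    def __init__(self, key, **kwargs):
        self.key = key
        self.options = kwargs


class Registry(object):
    def __init__(self):
        self._values = {}

    def register(self, key, cls):
        self._values[key] = cls

    def get(self, key, **kwargs):
        # Instantiate the registered class, passing the lookup key back in.
        return self._values[key](key=key, **kwargs)


registry = Registry()
registry.register('dummy', DummyProvider)
provider = registry.get('dummy', base_url='https://example.invalid')
assert provider.key == 'dummy'
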
09a6e2528f062581c90ed3f3225f19b36f0ac0f9
eve_api/forms.py
eve_api/forms.py
import re from django import forms from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation class EveAPIForm(forms.Form): """ EVE API input form """ user_id = forms.IntegerField(label=u'User ID') api_key = forms.CharField(label=u'API Key', max_length=64) description = forms.CharField(max_length=100, required=False) def clean_api_key(self): if not len(self.cleaned_data['api_key']) == 64: raise forms.ValidationError("Provided API Key is not 64 characters long.") if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']): raise forms.ValidationError("Provided API Key has invalid characters.") def clean_user_id(self): if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '': raise forms.ValidationError("Please provide a valid User ID") try: eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id']) except EVEAccount.DoesNotExist: return self.cleaned_data else: raise forms.ValidationError("This API User ID is already registered")
import re from django import forms from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation class EveAPIForm(forms.Form): """ EVE API input form """ user_id = forms.IntegerField(label=u'User ID') api_key = forms.CharField(label=u'API Key', max_length=64) description = forms.CharField(max_length=100, required=False) def clean_api_key(self): if not len(self.cleaned_data['api_key']) == 64: raise forms.ValidationError("Provided API Key is not 64 characters long.") if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']): raise forms.ValidationError("Provided API Key has invalid characters.") return self.cleaned_data['api_key'] def clean_user_id(self): if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '': raise forms.ValidationError("Please provide a valid User ID") try: eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id']) except EVEAccount.DoesNotExist: pass else: raise forms.ValidationError("This API User ID is already registered") return self.cleaned_data['user_id']
Fix the validation data on the EVEAPIForm
Fix the validation data on the EVEAPIForm
Python
bsd-3-clause
nikdoof/test-auth
python
## Code Before: import re from django import forms from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation class EveAPIForm(forms.Form): """ EVE API input form """ user_id = forms.IntegerField(label=u'User ID') api_key = forms.CharField(label=u'API Key', max_length=64) description = forms.CharField(max_length=100, required=False) def clean_api_key(self): if not len(self.cleaned_data['api_key']) == 64: raise forms.ValidationError("Provided API Key is not 64 characters long.") if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']): raise forms.ValidationError("Provided API Key has invalid characters.") def clean_user_id(self): if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '': raise forms.ValidationError("Please provide a valid User ID") try: eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id']) except EVEAccount.DoesNotExist: return self.cleaned_data else: raise forms.ValidationError("This API User ID is already registered") ## Instruction: Fix the validation data on the EVEAPIForm ## Code After: import re from django import forms from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation class EveAPIForm(forms.Form): """ EVE API input form """ user_id = forms.IntegerField(label=u'User ID') api_key = forms.CharField(label=u'API Key', max_length=64) description = forms.CharField(max_length=100, required=False) def clean_api_key(self): if not len(self.cleaned_data['api_key']) == 64: raise forms.ValidationError("Provided API Key is not 64 characters long.") if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']): raise forms.ValidationError("Provided API Key has invalid characters.") return self.cleaned_data['api_key'] def clean_user_id(self): if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '': raise forms.ValidationError("Please provide a valid User ID") try: eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id']) except EVEAccount.DoesNotExist: pass else: raise forms.ValidationError("This API User ID is already registered") return self.cleaned_data['user_id']
// ... existing code ... if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']): raise forms.ValidationError("Provided API Key has invalid characters.") return self.cleaned_data['api_key'] def clean_user_id(self): if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '': // ... modified code ... try: eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id']) except EVEAccount.DoesNotExist: pass else: raise forms.ValidationError("This API User ID is already registered") return self.cleaned_data['user_id'] // ... rest of the code ...
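
The underlying bug fixed above is easy to reintroduce: a Django clean_<field> method must return the cleaned value, otherwise cleaned_data for that field silently becomes None. A minimal hypothetical form showing the convention (the field name is invented and a configured Django project is assumed):

from django import forms


class ApiKeyForm(forms.Form):
    api_key = forms.CharField(max_length=64)

    def clean_api_key(self):
        value = self.cleaned_data['api_key']
        if len(value) != 64:
            raise forms.ValidationError("API key must be 64 characters long.")
        # Returning the value is required; falling off the end of the method
        # would leave cleaned_data['api_key'] set to None.
        return value
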
37f3968096b6f326b3d6f54e8d805677665602aa
src/main.h
src/main.h
/** * * Copyright (C) Tyler Hackett 2016 * * CUDA Triangle Counter * * A quickly-written program to determine all possible combinations of * valid triangles from a grid, allowing for certain coordinates of the * grid to be marked as unusable. * * main.h * * */ #define GRID_WIDTH 4 #define GRID_HEIGHT 4 const uint2 h_invalidPoints[] = { { 0, 0 } }; __global__ void countTriangles(uint2 *validPoints, int *count); bool isInvalidPoint(uint2 p);
/** * * Copyright (C) Tyler Hackett 2016 * * CUDA Triangle Counter * * A quickly-written program to determine all possible combinations of * valid triangles from a grid, allowing for certain coordinates of the * grid to be marked as unusable. * * main.h * * */ #define GRID_WIDTH 4 #define GRID_HEIGHT 4 const uint2 h_invalidPoints[] = { { 0, 0 }, { 0, 3 }, { 3, 0 }, { 3, 3 } }; __global__ void countTriangles(uint2 *validPoints, int *count); bool isInvalidPoint(uint2 p);
Set up parameters to coincide with Java counterpart
Set up parameters to coincide with Java counterpart
C
mit
ByteSyze/CudaTriangleCounter
c
## Code Before: /** * * Copyright (C) Tyler Hackett 2016 * * CUDA Triangle Counter * * A quickly-written program to determine all possible combinations of * valid triangles from a grid, allowing for certain coordinates of the * grid to be marked as unusable. * * main.h * * */ #define GRID_WIDTH 4 #define GRID_HEIGHT 4 const uint2 h_invalidPoints[] = { { 0, 0 } }; __global__ void countTriangles(uint2 *validPoints, int *count); bool isInvalidPoint(uint2 p); ## Instruction: Set up parameters to coincide with Java counterpart ## Code After: /** * * Copyright (C) Tyler Hackett 2016 * * CUDA Triangle Counter * * A quickly-written program to determine all possible combinations of * valid triangles from a grid, allowing for certain coordinates of the * grid to be marked as unusable. * * main.h * * */ #define GRID_WIDTH 4 #define GRID_HEIGHT 4 const uint2 h_invalidPoints[] = { { 0, 0 }, { 0, 3 }, { 3, 0 }, { 3, 3 } }; __global__ void countTriangles(uint2 *validPoints, int *count); bool isInvalidPoint(uint2 p);
... #define GRID_WIDTH 4 #define GRID_HEIGHT 4 const uint2 h_invalidPoints[] = { { 0, 0 }, { 0, 3 }, { 3, 0 }, { 3, 3 } }; __global__ void countTriangles(uint2 *validPoints, int *count); ...
ea0f0f13b5d91c991e593792eee721f5fb7717b8
core/enso/plugins.py
core/enso/plugins.py
import logging import atexit import enso.config _plugins = [] def install( eventManager ): eventManager.registerResponder( _init, "init" ) atexit.register( _shutdown ) def _init(): for moduleName in enso.config.PLUGINS: try: # Import the module; most of this code was taken from the # Python Library Reference documentation for __import__(). module = __import__( moduleName, {}, {}, [], 0 ) components = moduleName.split( "." ) for component in components[1:]: module = getattr( module, component ) module.load() _plugins.append( (module, moduleName) ) except: logging.warn( "Error while loading plugin '%s'." % moduleName ) raise logging.info( "Loaded plugin '%s'." % moduleName ) def _shutdown(): for module, moduleName in _plugins: try: module.unload() except: logging.warn( "Error while unloading plugin '%s'." % moduleName ) raise logging.info( "Unloaded plugin '%s'." % moduleName ) _plugins[:] = []
import logging import enso.config def install( eventManager ): eventManager.registerResponder( _init, "init" ) def _init(): for moduleName in enso.config.PLUGINS: try: # Import the module; most of this code was taken from the # Python Library Reference documentation for __import__(). module = __import__( moduleName, {}, {}, [], 0 ) components = moduleName.split( "." ) for component in components[1:]: module = getattr( module, component ) module.load() except: logging.warn( "Error while loading plugin '%s'." % moduleName ) raise logging.info( "Loaded plugin '%s'." % moduleName )
Change to plugin interface: unload() is no longer part of the protocol, and any unloading a plugin needs to do can just be done by registering an atexit handler.
Change to plugin interface: unload() is no longer part of the protocol, and any unloading a plugin needs to do can just be done by registering an atexit handler. git-svn-id: b6fd099cd3d97ba56ca68c4d1ea7aaa6a131ba03@17 8b7adc99-b347-0410-ae0a-d9e86c8d69b5
Python
bsd-3-clause
roderyc/enso,roderyc/enso,roderyc/enso
python
## Code Before: import logging import atexit import enso.config _plugins = [] def install( eventManager ): eventManager.registerResponder( _init, "init" ) atexit.register( _shutdown ) def _init(): for moduleName in enso.config.PLUGINS: try: # Import the module; most of this code was taken from the # Python Library Reference documentation for __import__(). module = __import__( moduleName, {}, {}, [], 0 ) components = moduleName.split( "." ) for component in components[1:]: module = getattr( module, component ) module.load() _plugins.append( (module, moduleName) ) except: logging.warn( "Error while loading plugin '%s'." % moduleName ) raise logging.info( "Loaded plugin '%s'." % moduleName ) def _shutdown(): for module, moduleName in _plugins: try: module.unload() except: logging.warn( "Error while unloading plugin '%s'." % moduleName ) raise logging.info( "Unloaded plugin '%s'." % moduleName ) _plugins[:] = [] ## Instruction: Change to plugin interface: unload() is no longer part of the protocol, and any unloading a plugin needs to do can just be done by registering an atexit handler. git-svn-id: b6fd099cd3d97ba56ca68c4d1ea7aaa6a131ba03@17 8b7adc99-b347-0410-ae0a-d9e86c8d69b5 ## Code After: import logging import enso.config def install( eventManager ): eventManager.registerResponder( _init, "init" ) def _init(): for moduleName in enso.config.PLUGINS: try: # Import the module; most of this code was taken from the # Python Library Reference documentation for __import__(). module = __import__( moduleName, {}, {}, [], 0 ) components = moduleName.split( "." ) for component in components[1:]: module = getattr( module, component ) module.load() except: logging.warn( "Error while loading plugin '%s'." % moduleName ) raise logging.info( "Loaded plugin '%s'." % moduleName )
# ... existing code ... import logging import enso.config def install( eventManager ): eventManager.registerResponder( _init, "init" ) def _init(): for moduleName in enso.config.PLUGINS: # ... modified code ... module = getattr( module, component ) module.load() except: logging.warn( "Error while loading plugin '%s'." % moduleName ) raise logging.info( "Loaded plugin '%s'." % moduleName ) # ... rest of the code ...
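
The change above moves teardown out of the loader: plugins that need cleanup register their own atexit handlers, so the loader only imports each module and calls its load() hook. A rough generic sketch of both halves (module names and the load() protocol are assumptions of this illustration, not enso's actual layout):

# loader.py
import importlib
import logging

PLUGINS = ['myapp.plugins.greeter']  # hypothetical plugin list


def load_plugins():
    for name in PLUGINS:
        module = importlib.import_module(name)
        module.load()
        logging.info("Loaded plugin %r", name)


# myapp/plugins/greeter.py -- a plugin that cleans up after itself
import atexit


def load():
    print("greeter plugin loaded")
    atexit.register(_cleanup)


def _cleanup():
    print("greeter plugin shutting down")
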
f1e50c1caeeec5b8e443f634534bfed46f26dbdf
2017/async-socket-server/simple-client.py
2017/async-socket-server/simple-client.py
import sys, time import socket def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) sockobj.send(b'foo^1234$jo') sockobj.send(b'sdfsdfsdfsdf^a') sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') buf = b'' while True: buf += sockobj.recv(1024) print(buf) sockobj.close() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
Modify client to read the socket concurrently
Modify client to read the socket concurrently
Python
unlicense
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
python
## Code Before: import sys, time import socket def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) sockobj.send(b'foo^1234$jo') sockobj.send(b'sdfsdfsdfsdf^a') sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') buf = b'' while True: buf += sockobj.recv(1024) print(buf) sockobj.close() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main() ## Instruction: Modify client to read the socket concurrently ## Code After: import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
// ... existing code ... import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): // ... modified code ... sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): // ... rest of the code ...
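
The pattern in the updated client, one thread dedicated to reading while the main thread writes and a marker in the stream telling the reader when to stop, can be exercised without any server by using a connected socket pair. A small self-contained sketch (the sentinel bytes are arbitrary; socket.socketpair() is available on POSIX and on recent Windows Pythons):

import socket
import threading


def reader(conn, sentinel=b'<done>'):
    buf = b''
    while sentinel not in buf:
        chunk = conn.recv(1024)
        if not chunk:  # peer closed the connection
            break
        buf += chunk
        print('Received:', chunk)


left, right = socket.socketpair()  # two already-connected sockets
t = threading.Thread(target=reader, args=(right,))
t.start()

left.sendall(b'hello ')
left.sendall(b'world <done>')
left.close()
t.join()
right.close()
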
deae63efe7c5469c68e451f30c23296bb67b80d6
www/db.h
www/db.h
/* * db.h * * Copyright (C) 2011 OpenTech Labs * Andrew Clayton <[email protected]> * Released under the GNU General Public License (GPL) version 3. * See COPYING */ #ifndef _DB_H_ #define _DB_H_ /* For Tokyocabinet (user sessions) */ #include <tcutil.h> #include <tctdb.h> #include <stdbool.h> #include <stdint.h> #include <libgen.h> /* MySQL */ #include <my_global.h> #include <mysql.h> MYSQL *db_conn(void); #endif /* _DB_H_ */
/* * db.h * * Copyright (C) 2011 OpenTech Labs * Andrew Clayton <[email protected]> * Released under the GNU General Public License (GPL) version 3. * See COPYING */ #ifndef _DB_H_ #define _DB_H_ /* For Tokyocabinet (user sessions) */ #include <tcutil.h> #include <tctdb.h> #include <stdbool.h> #include <stdint.h> #include <libgen.h> /* MySQL */ /* * The FCGI printf function seemed to be causing a conflict here, under F16 * with GCC 4.6.2 * * Just undef printf for the my_global stuff and then define it back again. */ #undef printf #include <my_global.h> #define printf FCGI_printf #include <mysql.h> MYSQL *db_conn(void); #endif /* _DB_H_ */
Fix compiler warnings under F16/GCC 4.6.2
Fix compiler warnings under F16/GCC 4.6.2 When compiling receiptomatic under Fedora 16 with GCC 4.6.2 we were getting the following warnings... gcc -Wall -std=c99 -O2 -g -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector --param=ssp-buffer-size=4 -fPIE -c receiptomatic-www.c -D_RECEIPTOMATIC_WWW_ -I../../libctemplate `pkg-config --cflags glib-2.0` `pkg-config --cflags gmime-2.0` `mysql_config --cflags` In file included from /usr/include/mysql/my_global.h:1039:0, from db.h:21, from common.h:31, from receiptomatic-www.c:22: /usr/include/mysql/my_dbug.h:59:3: warning: ‘FCGI_printf’ is an unrecognized format function type [-Wformat] This seemed to be a conflict between the FCGI_printf definition and the use of 'ATTRIBUTE_FORMAT(printf, 1, 2)' in my_global.h The workaround seems to be to undef printf before including my_global.h and then defining it back again afterwards. Signed-off-by: Andrew Clayton <[email protected]>
C
agpl-3.0
ac000/receiptomatic
c
## Code Before: /* * db.h * * Copyright (C) 2011 OpenTech Labs * Andrew Clayton <[email protected]> * Released under the GNU General Public License (GPL) version 3. * See COPYING */ #ifndef _DB_H_ #define _DB_H_ /* For Tokyocabinet (user sessions) */ #include <tcutil.h> #include <tctdb.h> #include <stdbool.h> #include <stdint.h> #include <libgen.h> /* MySQL */ #include <my_global.h> #include <mysql.h> MYSQL *db_conn(void); #endif /* _DB_H_ */ ## Instruction: Fix compiler warnings under F16/GCC 4.6.2 When compiling receiptomatic under Fedora 16 with GCC 4.6.2 we were getting the following warnings... gcc -Wall -std=c99 -O2 -g -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector --param=ssp-buffer-size=4 -fPIE -c receiptomatic-www.c -D_RECEIPTOMATIC_WWW_ -I../../libctemplate `pkg-config --cflags glib-2.0` `pkg-config --cflags gmime-2.0` `mysql_config --cflags` In file included from /usr/include/mysql/my_global.h:1039:0, from db.h:21, from common.h:31, from receiptomatic-www.c:22: /usr/include/mysql/my_dbug.h:59:3: warning: ‘FCGI_printf’ is an unrecognized format function type [-Wformat] This seemed to be a conflict between the FCGI_printf definition and the use of 'ATTRIBUTE_FORMAT(printf, 1, 2)' in my_global.h The workaround seems to be to undef printf before including my_global.h and then defining it back again afterwards. Signed-off-by: Andrew Clayton <[email protected]> ## Code After: /* * db.h * * Copyright (C) 2011 OpenTech Labs * Andrew Clayton <[email protected]> * Released under the GNU General Public License (GPL) version 3. * See COPYING */ #ifndef _DB_H_ #define _DB_H_ /* For Tokyocabinet (user sessions) */ #include <tcutil.h> #include <tctdb.h> #include <stdbool.h> #include <stdint.h> #include <libgen.h> /* MySQL */ /* * The FCGI printf function seemed to be causing a conflict here, under F16 * with GCC 4.6.2 * * Just undef printf for the my_global stuff and then define it back again. */ #undef printf #include <my_global.h> #define printf FCGI_printf #include <mysql.h> MYSQL *db_conn(void); #endif /* _DB_H_ */
... #include <libgen.h> /* MySQL */ /* * The FCGI printf function seemed to be causing a conflict here, under F16 * with GCC 4.6.2 * * Just undef printf for the my_global stuff and then define it back again. */ #undef printf #include <my_global.h> #define printf FCGI_printf #include <mysql.h> MYSQL *db_conn(void); ...
643e04ec09612d6f36dcd98dba44e00011674353
fito/data_store/dict_ds.py
fito/data_store/dict_ds.py
from fito.data_store.base import BaseDataStore class DictDataStore(BaseDataStore): def __init__(self, *args, **kwargs): super(DictDataStore, self).__init__(*args, **kwargs) self.data = {} def iteritems(self): return self.data.iteritems() def save(self, spec, object): self.data[spec] = object def _get(self, spec): if spec not in self.data: raise KeyError("Spec not found: {}".format(spec)) return self.data.get(spec) def iterkeys(self): return self.data.iterkeys()
from fito.data_store.base import BaseDataStore class DictDataStore(BaseDataStore): def __init__(self, *args, **kwargs): super(DictDataStore, self).__init__(*args, **kwargs) self.data = {} def iteritems(self): return self.data.iteritems() def save(self, spec, object): self.data[spec] = object def _get(self, spec): if spec not in self.data: raise KeyError("Spec not found: {}".format(spec)) return self.data.get(spec) def iterkeys(self): return self.data.iterkeys() def clean(self): self.data = {}
Clean method for dict ds
Clean method for dict ds
Python
mit
elsonidoq/fito
python
## Code Before: from fito.data_store.base import BaseDataStore class DictDataStore(BaseDataStore): def __init__(self, *args, **kwargs): super(DictDataStore, self).__init__(*args, **kwargs) self.data = {} def iteritems(self): return self.data.iteritems() def save(self, spec, object): self.data[spec] = object def _get(self, spec): if spec not in self.data: raise KeyError("Spec not found: {}".format(spec)) return self.data.get(spec) def iterkeys(self): return self.data.iterkeys() ## Instruction: Clean method for dict ds ## Code After: from fito.data_store.base import BaseDataStore class DictDataStore(BaseDataStore): def __init__(self, *args, **kwargs): super(DictDataStore, self).__init__(*args, **kwargs) self.data = {} def iteritems(self): return self.data.iteritems() def save(self, spec, object): self.data[spec] = object def _get(self, spec): if spec not in self.data: raise KeyError("Spec not found: {}".format(spec)) return self.data.get(spec) def iterkeys(self): return self.data.iterkeys() def clean(self): self.data = {}
// ... existing code ... def iterkeys(self): return self.data.iterkeys() def clean(self): self.data = {} // ... rest of the code ...
e3fe8c01855e3462ae5e4fd51473a75355fe416d
tests/test_utils.py
tests/test_utils.py
from parsel.utils import shorten from pytest import mark, raises import six @mark.parametrize( 'width,expected', ( (-1, ValueError), (0, u''), (1, u'.'), (2, u'..'), (3, u'...'), (4, u'f...'), (5, u'fo...'), (6, u'foobar'), (7, u'foobar'), ) ) def test_shorten(width, expected): if isinstance(expected, six.string_types): assert shorten(u'foobar', width) == expected else: with raises(expected): shorten(u'foobar', width)
from parsel.utils import shorten, extract_regex from pytest import mark, raises import six @mark.parametrize( 'width,expected', ( (-1, ValueError), (0, u''), (1, u'.'), (2, u'..'), (3, u'...'), (4, u'f...'), (5, u'fo...'), (6, u'foobar'), (7, u'foobar'), ) ) def test_shorten(width, expected): if isinstance(expected, six.string_types): assert shorten(u'foobar', width) == expected else: with raises(expected): shorten(u'foobar', width) @mark.parametrize('regex, text, replace_entities, expected', ( [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25, 2019', True, ['October', '25', '2019']], [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October', '25', '2019']], [r'(?P<extract>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October']], [r'\w+\s*\d+\s*\,?\s*\d+', 'October 25 2019', True, ['October 25 2019']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', True, ['"sometext" &amp; "moretext"']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', False, ['&quot;sometext&quot; &amp; &quot;moretext&quot;']], )) def test_extract_regex(regex, text, replace_entities, expected): assert extract_regex(regex, text, replace_entities) == expected
Add tests for `extract_regex` function.
Add tests for `extract_regex` function.
Python
bsd-3-clause
scrapy/parsel
python
## Code Before: from parsel.utils import shorten from pytest import mark, raises import six @mark.parametrize( 'width,expected', ( (-1, ValueError), (0, u''), (1, u'.'), (2, u'..'), (3, u'...'), (4, u'f...'), (5, u'fo...'), (6, u'foobar'), (7, u'foobar'), ) ) def test_shorten(width, expected): if isinstance(expected, six.string_types): assert shorten(u'foobar', width) == expected else: with raises(expected): shorten(u'foobar', width) ## Instruction: Add tests for `extract_regex` function. ## Code After: from parsel.utils import shorten, extract_regex from pytest import mark, raises import six @mark.parametrize( 'width,expected', ( (-1, ValueError), (0, u''), (1, u'.'), (2, u'..'), (3, u'...'), (4, u'f...'), (5, u'fo...'), (6, u'foobar'), (7, u'foobar'), ) ) def test_shorten(width, expected): if isinstance(expected, six.string_types): assert shorten(u'foobar', width) == expected else: with raises(expected): shorten(u'foobar', width) @mark.parametrize('regex, text, replace_entities, expected', ( [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25, 2019', True, ['October', '25', '2019']], [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October', '25', '2019']], [r'(?P<extract>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October']], [r'\w+\s*\d+\s*\,?\s*\d+', 'October 25 2019', True, ['October 25 2019']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', True, ['"sometext" &amp; "moretext"']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', False, ['&quot;sometext&quot; &amp; &quot;moretext&quot;']], )) def test_extract_regex(regex, text, replace_entities, expected): assert extract_regex(regex, text, replace_entities) == expected
... from parsel.utils import shorten, extract_regex from pytest import mark, raises import six ... else: with raises(expected): shorten(u'foobar', width) @mark.parametrize('regex, text, replace_entities, expected', ( [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25, 2019', True, ['October', '25', '2019']], [r'(?P<month>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October', '25', '2019']], [r'(?P<extract>\w+)\s*(?P<day>\d+)\s*\,?\s*(?P<year>\d+)', 'October 25 2019', True, ['October']], [r'\w+\s*\d+\s*\,?\s*\d+', 'October 25 2019', True, ['October 25 2019']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', True, ['"sometext" &amp; "moretext"']], [r'^.*$', '&quot;sometext&quot; &amp; &quot;moretext&quot;', False, ['&quot;sometext&quot; &amp; &quot;moretext&quot;']], )) def test_extract_regex(regex, text, replace_entities, expected): assert extract_regex(regex, text, replace_entities) == expected ...
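
The new tests use a single body driven by a table of rows via pytest.mark.parametrize, so each row is reported as its own test case. A self-contained sketch of the same style using only the standard library, so it runs without parsel installed (the patterns, texts and expectations are invented examples):

import re

import pytest


@pytest.mark.parametrize('pattern, text, expected', (
    (r'\d+', 'October 25, 2019', ['25', '2019']),
    (r'[A-Z]\w+', 'October 25, 2019', ['October']),
    (r'\d{4}', 'no year here', []),
))
def test_findall(pattern, text, expected):
    # Each tuple above becomes one independently reported test case.
    assert re.findall(pattern, text) == expected
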
0004bde0d40dfea167d76a83c20acfffc0abfa28
poyo/__init__.py
poyo/__init__.py
from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' __all__ = ['parse_string', 'PoyoException']
import logging from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = ['parse_string', 'PoyoException']
Add NullHandler to poyo root logger
Add NullHandler to poyo root logger
Python
mit
hackebrot/poyo
python
## Code Before:
from .exceptions import PoyoException
from .parser import parse_string

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.3.0'

__all__ = ['parse_string', 'PoyoException']

## Instruction:
Add NullHandler to poyo root logger

## Code After:
import logging

from .exceptions import PoyoException
from .parser import parse_string

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.3.0'

logging.getLogger(__name__).addHandler(logging.NullHandler())

__all__ = ['parse_string', 'PoyoException']
# ... existing code ...
import logging

from .exceptions import PoyoException
from .parser import parse_string

# ... modified code ...
__email__ = '[email protected]'
__version__ = '0.3.0'

logging.getLogger(__name__).addHandler(logging.NullHandler())

__all__ = ['parse_string', 'PoyoException']
# ... rest of the code ...
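The change in this record is the standard library-logging pattern: a package attaches a NullHandler to its own logger so that merely importing it never triggers "no handlers" complaints, and log output only appears once the application configures logging itself. A small hedged sketch of that pattern; the package name "mylib" is a placeholder and is not part of poyo.

import logging

# Library side (what poyo's __init__ does for its own logger namespace):
lib_logger = logging.getLogger("mylib")
lib_logger.addHandler(logging.NullHandler())


def parse_something(text):
    lib_logger.debug("parsing %r", text)
    return text.strip()


# Application side: nothing is printed until the app opts in.
if __name__ == "__main__":
    parse_something("  quiet  ")        # no output: logging is not configured yet
    logging.basicConfig(level=logging.DEBUG)
    parse_something("  now visible  ")  # the DEBUG line is printed via the root handler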
9796e4cc76c93344578a8fbed88feb6c552d7ba4
thcrap/src/global.c
thcrap/src/global.c
/**
  * Touhou Community Reliant Automatic Patcher
  * Main DLL
  *
  * ----
  *
  * Globals, compile-time constants and runconfig abstractions.
  */

#include "thcrap.h"

CRITICAL_SECTION cs_file_access;
json_t* run_cfg = NULL;

const char* PROJECT_NAME(void)
{
    return "Touhou Community Reliant Automatic Patcher";
}
const char* PROJECT_NAME_SHORT(void)
{
    return "thcrap";
}
const DWORD PROJECT_VERSION(void)
{
    return 0x20131025;
}
const char* PROJECT_VERSION_STRING(void)
{
    static char ver_str[11] = {0};
    if(!ver_str[0]) {
        str_hexdate_format(ver_str, PROJECT_VERSION());
    }
    return ver_str;
}

json_t* runconfig_get(void)
{
    return run_cfg;
}

void runconfig_set(json_t *new_run_cfg)
{
    run_cfg = new_run_cfg;
}

void* runconfig_func_get(const char *name)
{
    json_t *funcs = json_object_get(run_cfg, "funcs");
    return (void*)json_object_get_hex(funcs, name);
}
/**
  * Touhou Community Reliant Automatic Patcher
  * Main DLL
  *
  * ----
  *
  * Globals, compile-time constants and runconfig abstractions.
  */

#include "thcrap.h"

CRITICAL_SECTION cs_file_access;
json_t* run_cfg = NULL;

const char* PROJECT_NAME(void)
{
    return "Touhou Community Reliant Automatic Patcher";
}
const char* PROJECT_NAME_SHORT(void)
{
    return "thcrap";
}
const DWORD PROJECT_VERSION(void)
{
    return 0x20131025;
}
const char* PROJECT_VERSION_STRING(void)
{
    static char ver_str[11] = {0};
    if(!ver_str[0]) {
        str_hexdate_format(ver_str, PROJECT_VERSION());
    }
    return ver_str;
}

json_t* runconfig_get(void)
{
    return run_cfg;
}

void runconfig_set(json_t *new_run_cfg)
{
    run_cfg = new_run_cfg;
    json_incref(run_cfg);
}

void* runconfig_func_get(const char *name)
{
    json_t *funcs = json_object_get(run_cfg, "funcs");
    return (void*)json_object_get_hex(funcs, name);
}
Increment the run configuration's reference counter.
runconfig_set(): Increment the run configuration's reference counter. Fixes bug #41 (https://bitbucket.org/nmlgc/thpatch-bugs/issue/41).
C
unlicense
thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap
c
## Code Before:
/**
  * Touhou Community Reliant Automatic Patcher
  * Main DLL
  *
  * ----
  *
  * Globals, compile-time constants and runconfig abstractions.
  */

#include "thcrap.h"

CRITICAL_SECTION cs_file_access;
json_t* run_cfg = NULL;

const char* PROJECT_NAME(void)
{
    return "Touhou Community Reliant Automatic Patcher";
}
const char* PROJECT_NAME_SHORT(void)
{
    return "thcrap";
}
const DWORD PROJECT_VERSION(void)
{
    return 0x20131025;
}
const char* PROJECT_VERSION_STRING(void)
{
    static char ver_str[11] = {0};
    if(!ver_str[0]) {
        str_hexdate_format(ver_str, PROJECT_VERSION());
    }
    return ver_str;
}

json_t* runconfig_get(void)
{
    return run_cfg;
}

void runconfig_set(json_t *new_run_cfg)
{
    run_cfg = new_run_cfg;
}

void* runconfig_func_get(const char *name)
{
    json_t *funcs = json_object_get(run_cfg, "funcs");
    return (void*)json_object_get_hex(funcs, name);
}

## Instruction:
runconfig_set(): Increment the run configuration's reference counter.
Fixes bug #41 (https://bitbucket.org/nmlgc/thpatch-bugs/issue/41).

## Code After:
/**
  * Touhou Community Reliant Automatic Patcher
  * Main DLL
  *
  * ----
  *
  * Globals, compile-time constants and runconfig abstractions.
  */

#include "thcrap.h"

CRITICAL_SECTION cs_file_access;
json_t* run_cfg = NULL;

const char* PROJECT_NAME(void)
{
    return "Touhou Community Reliant Automatic Patcher";
}
const char* PROJECT_NAME_SHORT(void)
{
    return "thcrap";
}
const DWORD PROJECT_VERSION(void)
{
    return 0x20131025;
}
const char* PROJECT_VERSION_STRING(void)
{
    static char ver_str[11] = {0};
    if(!ver_str[0]) {
        str_hexdate_format(ver_str, PROJECT_VERSION());
    }
    return ver_str;
}

json_t* runconfig_get(void)
{
    return run_cfg;
}

void runconfig_set(json_t *new_run_cfg)
{
    run_cfg = new_run_cfg;
    json_incref(run_cfg);
}

void* runconfig_func_get(const char *name)
{
    json_t *funcs = json_object_get(run_cfg, "funcs");
    return (void*)json_object_get_hex(funcs, name);
}
// ... existing code ...
void runconfig_set(json_t *new_run_cfg)
{
    run_cfg = new_run_cfg;
    json_incref(run_cfg);
}

void* runconfig_func_get(const char *name)
// ... rest of the code ...
74fde273d79248d4ad1c0cfd47d2861c83b50cbd
kolibri/auth/migrations/0007_auto_20171226_1125.py
kolibri/auth/migrations/0007_auto_20171226_1125.py
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('kolibriauth', '0006_auto_20171206_1207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='facilitydataset',
            name='preset',
            field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
        ),
        migrations.AlterUniqueTogether(
            name='facilityuser',
            unique_together=set([]),
        ),
    ]
from __future__ import unicode_literals

from django.db import migrations, models

# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
#    then this would be picked up as a change in Django if we had used
# 2. These choices can be changed in facility_configuration_presets.json
#    and such change should not warrant warnings that models are inconsistent
#    as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices


class Migration(migrations.Migration):

    dependencies = [
        ('kolibriauth', '0006_auto_20171206_1207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='facilitydataset',
            name='preset',
            field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
        ),
        migrations.AlterUniqueTogether(
            name='facilityuser',
            unique_together=set([]),
        ),
    ]
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
Python
mit
christianmemije/kolibri,christianmemije/kolibri,indirectlylit/kolibri,benjaoming/kolibri,lyw07/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,lyw07/kolibri,indirectlylit/kolibri,christianmemije/kolibri,jonboiser/kolibri,jonboiser/kolibri,DXCanas/kolibri,lyw07/kolibri,mrpau/kolibri,mrpau/kolibri,benjaoming/kolibri,indirectlylit/kolibri,christianmemije/kolibri,jonboiser/kolibri,learningequality/kolibri,DXCanas/kolibri,lyw07/kolibri,learningequality/kolibri,benjaoming/kolibri,DXCanas/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,indirectlylit/kolibri,benjaoming/kolibri
python
## Code Before:
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('kolibriauth', '0006_auto_20171206_1207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='facilitydataset',
            name='preset',
            field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
        ),
        migrations.AlterUniqueTogether(
            name='facilityuser',
            unique_together=set([]),
        ),
    ]

## Instruction:
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies

## Code After:
from __future__ import unicode_literals

from django.db import migrations, models

# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
#    then this would be picked up as a change in Django if we had used
# 2. These choices can be changed in facility_configuration_presets.json
#    and such change should not warrant warnings that models are inconsistent
#    as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices


class Migration(migrations.Migration):

    dependencies = [
        ('kolibriauth', '0006_auto_20171206_1207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='facilitydataset',
            name='preset',
            field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
        ),
        migrations.AlterUniqueTogether(
            name='facilityuser',
            unique_together=set([]),
        ),
    ]
// ... existing code ...
from __future__ import unicode_literals

from django.db import migrations, models

# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
#    then this would be picked up as a change in Django if we had used
# 2. These choices can be changed in facility_configuration_presets.json
#    and such change should not warrant warnings that models are inconsistent
#    as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices


class Migration(migrations.Migration):
// ... modified code ...
        migrations.AlterField(
            model_name='facilitydataset',
            name='preset',
            field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
        ),
        migrations.AlterUniqueTogether(
            name='facilityuser',
// ... rest of the code ...
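The fix in this record swaps an inline, generated choices literal for a stable list imported from a constants module, because Django's migration autodetector compares a field's deconstructed arguments, choices included, even though choices never affect the database schema. A rough sketch of that idea follows; the module layout is assumed, and only the preset labels are taken from the migration above, not from kolibri's facility_configuration_presets.json.

# constants/facility_presets.py (hypothetical stand-in for the module imported above)
presets = {
    "informal": "Informal and personal use",
    "nonformal": "Self-managed",
    "formal": "Admin-managed",
}

# Sorting gives a deterministic order, so regenerating this list never looks
# like a model change to makemigrations.
choices = sorted(presets.items())

# A model or migration would then reference the shared, stable object, e.g.:
# from ..constants.facility_presets import choices as facility_choices
# preset = models.CharField(choices=facility_choices, default="nonformal", max_length=50)

if __name__ == "__main__":
    print(choices)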
f84df81f060746567b611a2071ff1a161fcf3206
generic_links/models.py
generic_links/models.py
from django import VERSION
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _


def get_user_model_fk_ref():
    """Get user model depending on Django version."""
    ver = VERSION
    if ver[0] >= 1 and ver[1] >= 5:
        return settings.AUTH_USER_MODEL
    else:
        return 'auth.User'


class GenericLink(models.Model):
    """
    Relates an object with an url and its data
    """
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()

    url = models.URLField()
    title = models.CharField(max_length=200)
    description = models.TextField(max_length=1000, null=True, blank=True)

    user = models.ForeignKey(get_user_model_fk_ref(), null=True, blank=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    is_external = models.BooleanField(default=True, db_index=True)

    class Meta:
        ordering = ("-created_at", )
        verbose_name = _("Generic Link")
        verbose_name_plural = _("Generic Links")

    def __unicode__(self):
        return self.url
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _


class GenericLink(models.Model):
    """
    Relates an object with an url and its data
    """
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()

    url = models.URLField()
    title = models.CharField(max_length=200)
    description = models.TextField(max_length=1000, null=True, blank=True)

    user = models.ForeignKey(get_user_model(), null=True, blank=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    is_external = models.BooleanField(default=True, db_index=True)

    class Meta:
        ordering = ("-created_at", )
        verbose_name = _("Generic Link")
        verbose_name_plural = _("Generic Links")

    def __unicode__(self):
        return self.url
Make User model compatible with Django 2.x
Make User model compatible with Django 2.x
Python
bsd-3-clause
matagus/django-generic-links,matagus/django-generic-links
python
## Code Before:
from django import VERSION
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _


def get_user_model_fk_ref():
    """Get user model depending on Django version."""
    ver = VERSION
    if ver[0] >= 1 and ver[1] >= 5:
        return settings.AUTH_USER_MODEL
    else:
        return 'auth.User'


class GenericLink(models.Model):
    """
    Relates an object with an url and its data
    """
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()

    url = models.URLField()
    title = models.CharField(max_length=200)
    description = models.TextField(max_length=1000, null=True, blank=True)

    user = models.ForeignKey(get_user_model_fk_ref(), null=True, blank=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    is_external = models.BooleanField(default=True, db_index=True)

    class Meta:
        ordering = ("-created_at", )
        verbose_name = _("Generic Link")
        verbose_name_plural = _("Generic Links")

    def __unicode__(self):
        return self.url

## Instruction:
Make User model compatible with Django 2.x

## Code After:
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _


class GenericLink(models.Model):
    """
    Relates an object with an url and its data
    """
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()

    url = models.URLField()
    title = models.CharField(max_length=200)
    description = models.TextField(max_length=1000, null=True, blank=True)

    user = models.ForeignKey(get_user_model(), null=True, blank=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    is_external = models.BooleanField(default=True, db_index=True)

    class Meta:
        ordering = ("-created_at", )
        verbose_name = _("Generic Link")
        verbose_name_plural = _("Generic Links")

    def __unicode__(self):
        return self.url
// ... existing code ...
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _


class GenericLink(models.Model):
// ... modified code ...
    title = models.CharField(max_length=200)
    description = models.TextField(max_length=1000, null=True, blank=True)

    user = models.ForeignKey(get_user_model(), null=True, blank=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
// ... rest of the code ...
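For the user foreign key in this record, a brief hedged sketch of what the two references seen across the old and new versions evaluate to: the lazy string from settings versus the concrete class returned by get_user_model(). It assumes a default Django install with the stock auth.User and minimal in-memory settings; it is only meant to show the two forms side by side, not to reproduce the django-generic-links project setup.

import django
from django.conf import settings

settings.configure(
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
    ],
    DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}},
)
django.setup()

from django.contrib.auth import get_user_model

print(settings.AUTH_USER_MODEL)  # 'auth.User' -- the lazy string form usable as a ForeignKey target
print(get_user_model())          # the actual model class, resolved at runtime after django.setup()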
2ad47f6ce00246cbf54639438d9279b8a7fa9b29
python/tests/t_envoy_logs.py
python/tests/t_envoy_logs.py
import pytest, re

from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP

access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador')


class EnvoyLogPathTest(AmbassadorTest):
    target: ServiceType
    log_path: str

    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'

    def config(self):
        yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
  envoy_log_path: {self.log_path}
""")

    def check(self):
        cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
        if not cmd.check("check envoy access log"):
            pytest.exit("envoy access log does not exist")

        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line)
import pytest, re

from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP

access_log_entry_regex = re.compile('^MY_REQUEST 200 .*')


class EnvoyLogTest(AmbassadorTest):
    target: ServiceType
    log_path: str

    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'
        self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"'

    def config(self):
        yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
  envoy_log_path: {self.log_path}
  envoy_log_format: {self.log_format}
""")

    def check(self):
        cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
        if not cmd.check("check envoy access log"):
            pytest.exit("envoy access log does not exist")

        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
Test for Envoy logs format
Test for Envoy logs format Signed-off-by: Alvaro Saurin <[email protected]>
Python
apache-2.0
datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador
python
## Code Before:
import pytest, re

from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP

access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador')


class EnvoyLogPathTest(AmbassadorTest):
    target: ServiceType
    log_path: str

    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'

    def config(self):
        yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
  envoy_log_path: {self.log_path}
""")

    def check(self):
        cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
        if not cmd.check("check envoy access log"):
            pytest.exit("envoy access log does not exist")

        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line)

## Instruction:
Test for Envoy logs format

Signed-off-by: Alvaro Saurin <[email protected]>

## Code After:
import pytest, re

from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP

access_log_entry_regex = re.compile('^MY_REQUEST 200 .*')


class EnvoyLogTest(AmbassadorTest):
    target: ServiceType
    log_path: str

    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'
        self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"'

    def config(self):
        yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
  envoy_log_path: {self.log_path}
  envoy_log_format: {self.log_format}
""")

    def check(self):
        cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
        if not cmd.check("check envoy access log"):
            pytest.exit("envoy access log does not exist")

        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
...
from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP

access_log_entry_regex = re.compile('^MY_REQUEST 200 .*')


class EnvoyLogTest(AmbassadorTest):
    target: ServiceType
    log_path: str
...
    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'
        self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"'

    def config(self):
        yield self, self.format("""
...
ambassador_id: {self.ambassador_id}
config:
  envoy_log_path: {self.log_path}
  envoy_log_format: {self.log_format}
""")

    def check(self):
...
            pytest.exit("envoy access log does not exist")

        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
...
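A small sketch tying the custom Envoy log format in this record to the regex the test asserts against: it builds one invented access-log line in the shape the 'MY_REQUEST %RESPONSE_CODE% ...' format would emit for a 200 response and checks it with the same pattern. The authority, user agent, request id and upstream host values are made up for illustration.

import re

access_log_entry_regex = re.compile('^MY_REQUEST 200 .*')

# An invented line shaped like the envoy_log_format string in the record above.
sample_line = ('MY_REQUEST 200 "ambassador.default.svc" "python-requests/2.22.0" '
               '"3c1f2a0b-example-request-id" "10.42.0.17:8080"')

assert access_log_entry_regex.match(sample_line)
print("sample line matches:", bool(access_log_entry_regex.match(sample_line)))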