Dataset columns:

column        dtype          min    max
------        -----          ---    ---
commit        stringlengths  40     40
old_file      stringlengths  4      234
new_file      stringlengths  4      234
old_contents  stringlengths  10     3.01k
new_contents  stringlengths  19     3.38k
subject       stringlengths  16     736
message       stringlengths  17     2.63k
lang          stringclasses  4 values
license       stringclasses  13 values
repos         stringlengths  5      82.6k
config        stringclasses  4 values
content       stringlengths  134    4.41k
fuzzy_diff    stringlengths  29     3.44k
258ac421fb643d56725d353d07101e0c1dd2c8b6
src/main/java/techcable/minecraft/npclib/NPCLib.java
src/main/java/techcable/minecraft/npclib/NPCLib.java
package techcable.minecraft.npclib; import techcable.minecraft.npclib.citizens.CitizensNPCRegistry; import net.citizensnpcs.api.CitizensAPI; public class NPCLib { private NPCLib() {}; public static NPCRegistry getNPCRegistry() { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(); } else { throw new UnsupportedOperationException(); } } public static boolean hasCitizens() { try { Class.forName("net.citizensnpcs.api.CitizensAPI"); } catch (ClassNotFoundException e) { return false; } return CitizensAPI.hasImplementation(); } public static boolean hasNMS() { return false; } }
package techcable.minecraft.npclib; import techcable.minecraft.npclib.citizens.CitizensNPCRegistry; import net.citizensnpcs.api.CitizensAPI; public class NPCLib { private NPCLib() {}; public static NPCRegistry getNPCRegistry() { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(); } else { throw new UnsupportedOperationException(); } } public static NPCRegistry getNPCRegistry(String name) { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(name); } else { throw new UnsupportedOperationException(); } } public static boolean hasCitizens() { try { Class.forName("net.citizensnpcs.api.CitizensAPI"); } catch (ClassNotFoundException e) { return false; } return CitizensAPI.hasImplementation(); } public static boolean hasNMS() { return false; } }
Add support for named registries
Add support for named registries
Java
mit
TechzoneMC/NPCLib,ProgrammerDan/NPCLib
java
## Code Before: package techcable.minecraft.npclib; import techcable.minecraft.npclib.citizens.CitizensNPCRegistry; import net.citizensnpcs.api.CitizensAPI; public class NPCLib { private NPCLib() {}; public static NPCRegistry getNPCRegistry() { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(); } else { throw new UnsupportedOperationException(); } } public static boolean hasCitizens() { try { Class.forName("net.citizensnpcs.api.CitizensAPI"); } catch (ClassNotFoundException e) { return false; } return CitizensAPI.hasImplementation(); } public static boolean hasNMS() { return false; } } ## Instruction: Add support for named registries ## Code After: package techcable.minecraft.npclib; import techcable.minecraft.npclib.citizens.CitizensNPCRegistry; import net.citizensnpcs.api.CitizensAPI; public class NPCLib { private NPCLib() {}; public static NPCRegistry getNPCRegistry() { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(); } else { throw new UnsupportedOperationException(); } } public static NPCRegistry getNPCRegistry(String name) { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(name); } else { throw new UnsupportedOperationException(); } } public static boolean hasCitizens() { try { Class.forName("net.citizensnpcs.api.CitizensAPI"); } catch (ClassNotFoundException e) { return false; } return CitizensAPI.hasImplementation(); } public static boolean hasNMS() { return false; } }
// ... existing code ... } } public static NPCRegistry getNPCRegistry(String name) { if (hasCitizens()) { return CitizensNPCRegistry.getRegistry(name); } else { throw new UnsupportedOperationException(); } } public static boolean hasCitizens() { try { Class.forName("net.citizensnpcs.api.CitizensAPI"); // ... rest of the code ...
431ca4f2d44656ef9f97be50718712c6f3a0fa9b
qtawesome/tests/test_qtawesome.py
qtawesome/tests/test_qtawesome.py
# Standard library imports import subprocess # Test Library imports import pytest # Local imports import qtawesome as qta from qtawesome.iconic_font import IconicFont def test_segfault_import(): output_number = subprocess.call('python -c "import qtawesome ' '; qtawesome.icon()"', shell=True) assert output_number == 0 def test_unique_font_family_name(qtbot): """ Test that each font used by qtawesome has a unique name. If this test fails, this probably means that you need to rename the family name of some fonts. Please see PR #98 for more details on why it is necessary and on how to do this. Regression test for Issue #107 """ resource = qta._instance() assert isinstance(resource, IconicFont) prefixes = list(resource.fontname.keys()) assert prefixes fontnames = set(resource.fontname.values()) assert fontnames assert len(prefixes) == len(fontnames) if __name__ == "__main__": pytest.main()
# Standard library imports import subprocess import collections # Test Library imports import pytest # Local imports import qtawesome as qta from qtawesome.iconic_font import IconicFont def test_segfault_import(): output_number = subprocess.call('python -c "import qtawesome ' '; qtawesome.icon()"', shell=True) assert output_number == 0 def test_unique_font_family_name(qtbot): """ Test that each font used by qtawesome has a unique name. If this test fails, this probably means that you need to rename the family name of some fonts. Please see PR #98 for more details on why it is necessary and on how to do this. Regression test for Issue #107 """ resource = qta._instance() assert isinstance(resource, IconicFont) # Check that the fonts were loaded successfully. fontnames = resource.fontname.values() assert fontnames # Check that qtawesome does not load fonts with duplicate family names. duplicates = [fontname for fontname, count in collections.Counter(fontnames).items() if count > 1] assert not duplicates if __name__ == "__main__": pytest.main()
Make the test more comprehensive.
Make the test more comprehensive.
Python
mit
spyder-ide/qtawesome
python
## Code Before: # Standard library imports import subprocess # Test Library imports import pytest # Local imports import qtawesome as qta from qtawesome.iconic_font import IconicFont def test_segfault_import(): output_number = subprocess.call('python -c "import qtawesome ' '; qtawesome.icon()"', shell=True) assert output_number == 0 def test_unique_font_family_name(qtbot): """ Test that each font used by qtawesome has a unique name. If this test fails, this probably means that you need to rename the family name of some fonts. Please see PR #98 for more details on why it is necessary and on how to do this. Regression test for Issue #107 """ resource = qta._instance() assert isinstance(resource, IconicFont) prefixes = list(resource.fontname.keys()) assert prefixes fontnames = set(resource.fontname.values()) assert fontnames assert len(prefixes) == len(fontnames) if __name__ == "__main__": pytest.main() ## Instruction: Make the test more comprehensive. ## Code After: # Standard library imports import subprocess import collections # Test Library imports import pytest # Local imports import qtawesome as qta from qtawesome.iconic_font import IconicFont def test_segfault_import(): output_number = subprocess.call('python -c "import qtawesome ' '; qtawesome.icon()"', shell=True) assert output_number == 0 def test_unique_font_family_name(qtbot): """ Test that each font used by qtawesome has a unique name. If this test fails, this probably means that you need to rename the family name of some fonts. Please see PR #98 for more details on why it is necessary and on how to do this. Regression test for Issue #107 """ resource = qta._instance() assert isinstance(resource, IconicFont) # Check that the fonts were loaded successfully. fontnames = resource.fontname.values() assert fontnames # Check that qtawesome does not load fonts with duplicate family names. duplicates = [fontname for fontname, count in collections.Counter(fontnames).items() if count > 1] assert not duplicates if __name__ == "__main__": pytest.main()
... # Standard library imports import subprocess import collections # Test Library imports import pytest ... resource = qta._instance() assert isinstance(resource, IconicFont) # Check that the fonts were loaded successfully. fontnames = resource.fontname.values() assert fontnames # Check that qtawesome does not load fonts with duplicate family names. duplicates = [fontname for fontname, count in collections.Counter(fontnames).items() if count > 1] assert not duplicates if __name__ == "__main__": ...
691ccb9e99240f36ab954974e1ecbdea61c4c7b6
datagroupings/templatetags/key.py
datagroupings/templatetags/key.py
import json from django import template register = template.Library() @register.filter(name='key') def key(d, key_name): if key_name in d: return d[key_name] return '' @register.filter(name='value') def value(d, key_name): if key_name in d: return d[key_name] return '' @register.filter(name='minval') def minval(d, key_name): if d is not None: if d.get(key_name) is not None: minval = json.loads(d.get(key_name)).get('minval') if minval is not None: return minval return '' @register.filter(name='maxval') def maxval(d, key_name): if d is not None: if d.get(key_name) is not None: maxval = json.loads(d.get(key_name)).get('maxval') if maxval is not None: return maxval return ''
import json from django import template register = template.Library() @register.filter(name='key') def key(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' @register.filter(name='value') def value(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' @register.filter(name='minval') def minval(d, key_name): if d is not None: if d.get(key_name) is not None: minval = json.loads(d.get(key_name)).get('minval') if minval is not None: return minval return '' @register.filter(name='maxval') def maxval(d, key_name): if d is not None: if d.get(key_name) is not None: maxval = json.loads(d.get(key_name)).get('maxval') if maxval is not None: return maxval return ''
Fix TemplateTag issue with filters
Fix TemplateTag issue with filters
Python
apache-2.0
nagyistoce/geokey,nagyistoce/geokey,nagyistoce/geokey
python
## Code Before: import json from django import template register = template.Library() @register.filter(name='key') def key(d, key_name): if key_name in d: return d[key_name] return '' @register.filter(name='value') def value(d, key_name): if key_name in d: return d[key_name] return '' @register.filter(name='minval') def minval(d, key_name): if d is not None: if d.get(key_name) is not None: minval = json.loads(d.get(key_name)).get('minval') if minval is not None: return minval return '' @register.filter(name='maxval') def maxval(d, key_name): if d is not None: if d.get(key_name) is not None: maxval = json.loads(d.get(key_name)).get('maxval') if maxval is not None: return maxval return '' ## Instruction: Fix TemplateTag issue with filters ## Code After: import json from django import template register = template.Library() @register.filter(name='key') def key(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' @register.filter(name='value') def value(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' @register.filter(name='minval') def minval(d, key_name): if d is not None: if d.get(key_name) is not None: minval = json.loads(d.get(key_name)).get('minval') if minval is not None: return minval return '' @register.filter(name='maxval') def maxval(d, key_name): if d is not None: if d.get(key_name) is not None: maxval = json.loads(d.get(key_name)).get('maxval') if maxval is not None: return maxval return ''
// ... existing code ... @register.filter(name='key') def key(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' // ... modified code ... @register.filter(name='value') def value(d, key_name): if d is not None: if key_name in d: return d[key_name] return '' // ... rest of the code ...
06e1571bd5cd88cac8f764b0e4e8a48503c737f5
src/Walker.h
src/Walker.h
//! \file Walker.h #ifndef _WALKER_H #define _WALKER_H #include "ArticleCollection.h" //! Base class for article analyzers class Walker { protected: //! article collection, used as cache, for walked articles ArticleCollection articleSet; }; #endif // _WALKER_H
//! \file Walker.h #ifndef _WALKER_H #define _WALKER_H #include "ArticleCollection.h" //! Base class for article analyzers class Walker { public: const ArticleCollection& getCollection() const { return articleSet; } protected: //! article collection, used as cache, for walked articles ArticleCollection articleSet; }; #endif // _WALKER_H
Add method to get const article collection in walker
Add method to get const article collection in walker so I don't have to do everything within that class...
C
mit
dueringa/WikiWalker
c
## Code Before: //! \file Walker.h #ifndef _WALKER_H #define _WALKER_H #include "ArticleCollection.h" //! Base class for article analyzers class Walker { protected: //! article collection, used as cache, for walked articles ArticleCollection articleSet; }; #endif // _WALKER_H ## Instruction: Add method to get const article collection in walker so I don't have to do everything within that class... ## Code After: //! \file Walker.h #ifndef _WALKER_H #define _WALKER_H #include "ArticleCollection.h" //! Base class for article analyzers class Walker { public: const ArticleCollection& getCollection() const { return articleSet; } protected: //! article collection, used as cache, for walked articles ArticleCollection articleSet; }; #endif // _WALKER_H
... //! Base class for article analyzers class Walker { public: const ArticleCollection& getCollection() const { return articleSet; } protected: //! article collection, used as cache, for walked articles ArticleCollection articleSet; ...
7c1a6fdc82ccdf8469d95e1e77897fab6e25d551
hammock/__init__.py
hammock/__init__.py
import types from version import __version__ from .model import Model from .collection import Collection class Hammock(object): def __init__(self, collections=(), authenticators=(), storage=None): if type(collections) == types.ModuleType: collection_classes = [] for k,v in collections.__dict__.items(): try: if issubclass(v, Collection): collection_classes.append(v) except TypeError: pass else: collection_classes = collections entities = set() self.collections_by_class_name = {} for collection_cls in collection_classes: entities.add(collection_cls.entity) collection = collection_cls(storage) self.collections_by_class_name[collection_cls.__name__] = collection setattr(self, collection_cls.plural_name, collection) self.model = Model(storage, entities) for collection in self.collections_by_class_name.values(): new_links = {} if collection.links: for k, v in collection.links.items(): if not isinstance(v, basestring): v = v.__name__ referenced_collection = self.collections_by_class_name.get(v) new_links[k] = referenced_collection collection.links = new_links
import types from version import __version__ from .model import Model from .collection import Collection class Hammock(object): def __init__(self, collections=(), authenticators=(), storage=None): if type(collections) == types.ModuleType: collection_classes = [] for k,v in collections.__dict__.items(): try: if issubclass(v, Collection) and v != Collection: collection_classes.append(v) except TypeError: pass else: collection_classes = collections entities = set() self.collections_by_class_name = {} for collection_cls in collection_classes: entities.add(collection_cls.entity) collection = collection_cls(storage) self.collections_by_class_name[collection_cls.__name__] = collection setattr(self, collection_cls.plural_name, collection) self.model = Model(storage, entities) for collection in self.collections_by_class_name.values(): new_links = {} if collection.links: for k, v in collection.links.items(): if not isinstance(v, basestring): v = v.__name__ referenced_collection = self.collections_by_class_name.get(v) new_links[k] = referenced_collection collection.links = new_links
Make sure we don't include Collection when pulling collections from a module
Make sure we don't include Collection when pulling collections from a module
Python
mit
cooper-software/cellardoor
python
## Code Before: import types from version import __version__ from .model import Model from .collection import Collection class Hammock(object): def __init__(self, collections=(), authenticators=(), storage=None): if type(collections) == types.ModuleType: collection_classes = [] for k,v in collections.__dict__.items(): try: if issubclass(v, Collection): collection_classes.append(v) except TypeError: pass else: collection_classes = collections entities = set() self.collections_by_class_name = {} for collection_cls in collection_classes: entities.add(collection_cls.entity) collection = collection_cls(storage) self.collections_by_class_name[collection_cls.__name__] = collection setattr(self, collection_cls.plural_name, collection) self.model = Model(storage, entities) for collection in self.collections_by_class_name.values(): new_links = {} if collection.links: for k, v in collection.links.items(): if not isinstance(v, basestring): v = v.__name__ referenced_collection = self.collections_by_class_name.get(v) new_links[k] = referenced_collection collection.links = new_links ## Instruction: Make sure we don't include Collection when pulling collections from a module ## Code After: import types from version import __version__ from .model import Model from .collection import Collection class Hammock(object): def __init__(self, collections=(), authenticators=(), storage=None): if type(collections) == types.ModuleType: collection_classes = [] for k,v in collections.__dict__.items(): try: if issubclass(v, Collection) and v != Collection: collection_classes.append(v) except TypeError: pass else: collection_classes = collections entities = set() self.collections_by_class_name = {} for collection_cls in collection_classes: entities.add(collection_cls.entity) collection = collection_cls(storage) self.collections_by_class_name[collection_cls.__name__] = collection setattr(self, collection_cls.plural_name, collection) self.model = Model(storage, entities) for collection in self.collections_by_class_name.values(): new_links = {} if collection.links: for k, v in collection.links.items(): if not isinstance(v, basestring): v = v.__name__ referenced_collection = self.collections_by_class_name.get(v) new_links[k] = referenced_collection collection.links = new_links
... collection_classes = [] for k,v in collections.__dict__.items(): try: if issubclass(v, Collection) and v != Collection: collection_classes.append(v) except TypeError: pass ...
141b46d1f5178df7e110aee7b2b50ce6f5b44b7a
contrib/python-copper/t/test_wsgi.py
contrib/python-copper/t/test_wsgi.py
from copper.wsgi_support import wsgi def test_http_handler(copper_client, copper_http_client): def application(environ, start_response): message = 'Hello, %s!' % (environ['PATH_INFO'],) start_response('200 OK', [ ('Content-Type', 'text/plain; charset=UTF-8'), ('Content-Length', '%d' % len(message)), ]) return [message] with copper_client.publish('http:/hello/', wsgi(application)): result = copper_http_client.open('copper:///hello/world').read() assert result == 'Hello, /hello/world!' result = copper_http_client.open('copper:///hello/foobar').read() assert result == 'Hello, /hello/foobar!' res = copper_http_client.open('copper:///hello') assert res.code == 404 res = copper_http_client.open('copper:///foobar') assert res.code == 404
from copper.wsgi_support import wsgi def test_http_handler(copper_client, copper_http_client): def application(environ, start_response): message = 'Hello, %s!' % (environ['PATH_INFO'],) start_response('200 OK', [ ('Content-Type', 'text/plain; charset=UTF-8'), ('Content-Length', '%d' % len(message)), ]) return [message] with copper_client.publish('http:hello', wsgi(application)): result = copper_http_client.open('copper:///hello/world').read() assert result == 'Hello, /world!' result = copper_http_client.open('copper:///hello/foobar').read() assert result == 'Hello, /foobar!' res = copper_http_client.open('copper:///hello') assert res.code == 404 res = copper_http_client.open('copper:///foobar') assert res.code == 404
Fix python tests re: http routing
Fix python tests re: http routing
Python
mit
snaury/copper,snaury/copper,snaury/copper
python
## Code Before: from copper.wsgi_support import wsgi def test_http_handler(copper_client, copper_http_client): def application(environ, start_response): message = 'Hello, %s!' % (environ['PATH_INFO'],) start_response('200 OK', [ ('Content-Type', 'text/plain; charset=UTF-8'), ('Content-Length', '%d' % len(message)), ]) return [message] with copper_client.publish('http:/hello/', wsgi(application)): result = copper_http_client.open('copper:///hello/world').read() assert result == 'Hello, /hello/world!' result = copper_http_client.open('copper:///hello/foobar').read() assert result == 'Hello, /hello/foobar!' res = copper_http_client.open('copper:///hello') assert res.code == 404 res = copper_http_client.open('copper:///foobar') assert res.code == 404 ## Instruction: Fix python tests re: http routing ## Code After: from copper.wsgi_support import wsgi def test_http_handler(copper_client, copper_http_client): def application(environ, start_response): message = 'Hello, %s!' % (environ['PATH_INFO'],) start_response('200 OK', [ ('Content-Type', 'text/plain; charset=UTF-8'), ('Content-Length', '%d' % len(message)), ]) return [message] with copper_client.publish('http:hello', wsgi(application)): result = copper_http_client.open('copper:///hello/world').read() assert result == 'Hello, /world!' result = copper_http_client.open('copper:///hello/foobar').read() assert result == 'Hello, /foobar!' res = copper_http_client.open('copper:///hello') assert res.code == 404 res = copper_http_client.open('copper:///foobar') assert res.code == 404
... ('Content-Length', '%d' % len(message)), ]) return [message] with copper_client.publish('http:hello', wsgi(application)): result = copper_http_client.open('copper:///hello/world').read() assert result == 'Hello, /world!' result = copper_http_client.open('copper:///hello/foobar').read() assert result == 'Hello, /foobar!' res = copper_http_client.open('copper:///hello') assert res.code == 404 res = copper_http_client.open('copper:///foobar') ...
0ae9fcccb1c67a8d9337e4ef2887fb7ea2e01d51
mpltools/io/core.py
mpltools/io/core.py
import os import matplotlib.pyplot as plt def save_all_figs(directory='./', fmt=None, default_name='untitled%i'): """Save all open figures. Each figure is saved with the title of the plot, if possible. Parameters ------------ directory : str Path where figures are saved. fmt : str, list of str Image format(s) of saved figures. If None, default to rc parameter 'savefig.extension'. default_name : str Default filename to use if plot has no title. Must contain '%i' for the figure number. Examples -------- >>> save_all_figs('plots/', fmt=['pdf','png']) """ for fignum in plt.get_fignums(): try: filename = plt.figure(fignum).get_axes()[0].get_title() if filename == '': filename = default_name % fignum savename = os.path.join(directory, filename) if fmt is None: fmt = plt.rcParams.get('savefig.extension','png') if isinstance(fmt, basestring): fmt = [fmt] for a_fmt in fmt: plt.savefig(savename + '.' + a_fmt) print ('Saved \'%s\' '% (savename + '.' + a_fmt)) except(IndexError): pass
import os import matplotlib.pyplot as plt def save_all_figs(directory='./', fmt=None, default_name='untitled%i'): """Save all open figures. Each figure is saved with the title of the plot, if possible. Parameters ------------ directory : str Path where figures are saved. fmt : str, list of str Image format(s) of saved figures. If None, default to rc parameter 'savefig.extension'. default_name : str Default filename to use if plot has no title. Must contain '%i' for the figure number. Examples -------- >>> save_all_figs('plots/', fmt=['pdf','png']) """ fmt = fmt if fmt is not None else 'png' if isinstance(fmt, basestring): fmt = [fmt] for fignum in plt.get_fignums(): try: filename = plt.figure(fignum).get_axes()[0].get_title() if filename == '': filename = default_name % fignum savepath = os.path.join(directory, filename) for a_fmt in fmt: savename = '%s.%s' % (savepath, a_fmt) plt.savefig(savename) print("Saved '%s'" % savename) except(IndexError): pass
Refactor formatting of save name.
Refactor formatting of save name.
Python
bsd-3-clause
tonysyu/mpltools,matteoicardi/mpltools
python
## Code Before: import os import matplotlib.pyplot as plt def save_all_figs(directory='./', fmt=None, default_name='untitled%i'): """Save all open figures. Each figure is saved with the title of the plot, if possible. Parameters ------------ directory : str Path where figures are saved. fmt : str, list of str Image format(s) of saved figures. If None, default to rc parameter 'savefig.extension'. default_name : str Default filename to use if plot has no title. Must contain '%i' for the figure number. Examples -------- >>> save_all_figs('plots/', fmt=['pdf','png']) """ for fignum in plt.get_fignums(): try: filename = plt.figure(fignum).get_axes()[0].get_title() if filename == '': filename = default_name % fignum savename = os.path.join(directory, filename) if fmt is None: fmt = plt.rcParams.get('savefig.extension','png') if isinstance(fmt, basestring): fmt = [fmt] for a_fmt in fmt: plt.savefig(savename + '.' + a_fmt) print ('Saved \'%s\' '% (savename + '.' + a_fmt)) except(IndexError): pass ## Instruction: Refactor formatting of save name. ## Code After: import os import matplotlib.pyplot as plt def save_all_figs(directory='./', fmt=None, default_name='untitled%i'): """Save all open figures. Each figure is saved with the title of the plot, if possible. Parameters ------------ directory : str Path where figures are saved. fmt : str, list of str Image format(s) of saved figures. If None, default to rc parameter 'savefig.extension'. default_name : str Default filename to use if plot has no title. Must contain '%i' for the figure number. Examples -------- >>> save_all_figs('plots/', fmt=['pdf','png']) """ fmt = fmt if fmt is not None else 'png' if isinstance(fmt, basestring): fmt = [fmt] for fignum in plt.get_fignums(): try: filename = plt.figure(fignum).get_axes()[0].get_title() if filename == '': filename = default_name % fignum savepath = os.path.join(directory, filename) for a_fmt in fmt: savename = '%s.%s' % (savepath, a_fmt) plt.savefig(savename) print("Saved '%s'" % savename) except(IndexError): pass
// ... existing code ... >>> save_all_figs('plots/', fmt=['pdf','png']) """ fmt = fmt if fmt is not None else 'png' if isinstance(fmt, basestring): fmt = [fmt] for fignum in plt.get_fignums(): try: filename = plt.figure(fignum).get_axes()[0].get_title() if filename == '': filename = default_name % fignum savepath = os.path.join(directory, filename) for a_fmt in fmt: savename = '%s.%s' % (savepath, a_fmt) plt.savefig(savename) print("Saved '%s'" % savename) except(IndexError): pass // ... rest of the code ...
27c2878ab43ff1e38492e17971166e8fe3c8f1e1
tests/unit/test_test_setup.py
tests/unit/test_test_setup.py
"""Tests for correctly generated, working setup.""" from os import system from sys import version_info from . import pytest_generate_tests # noqa, pylint: disable=unused-import # pylint: disable=too-few-public-methods class TestTestSetup(object): """ Tests for verifying generated test setups of this cookiecutter, executed several times with different values (test scenarios). """ scenarios = [ ('django', { 'project_slug': 'django-project', 'framework': 'Django', }), # ('flask', { # 'project_slug': 'flask-project', # 'framework': 'Flask', # }), ] # pylint: disable=no-self-use def test_test_setup(self, cookies, project_slug, framework): """ Generate a project and verify the test setup executes successfully. """ py_version = 'py%s%s' % version_info[:2] result = cookies.bake(extra_context={ 'project_slug': project_slug, 'framework': framework, 'tests': 'flake8,pylint,%s,behave' % py_version, }) assert result.exit_code == 0 assert result.exception is None tox_ini = result.project.join('tox.ini') assert tox_ini.isfile() exit_code = system('tox -c %s' % tox_ini) assert exit_code == 0, 'Running tests in generated project fails.'
"""Tests for correctly generated, working setup.""" from os import system from sys import version_info from . import pytest_generate_tests # noqa, pylint: disable=unused-import # pylint: disable=too-few-public-methods class TestTestSetup(object): """ Tests for verifying generated test setups of this cookiecutter, executed several times with different values (test scenarios). """ scenarios = [ ('django', { 'project_slug': 'django-project', 'framework': 'Django', }), # ('flask', { # 'project_slug': 'flask-project', # 'framework': 'Flask', # }), ] # pylint: disable=no-self-use def test_test_setup(self, cookies, project_slug, framework): """ Generate a project and verify the test setup executes successfully. """ major, minor = version_info[:2] py_version = 'py%s%s' % (major, minor) result = cookies.bake(extra_context={ 'project_slug': project_slug, 'framework': framework, 'tests': 'flake8,pylint,%s,behave' % py_version, }) assert result.exit_code == 0, \ 'Cookiecutter exits with %(exit_code)s:' \ ' %(exception)s' % result.__dict__ assert result.exception is None tox_ini = result.project.join('tox.ini') assert tox_ini.isfile() exit_code = system('tox -c %s' % tox_ini) assert exit_code == 0, 'Running tests in generated project fails.'
Make py_version and assertion more readable
Make py_version and assertion more readable
Python
apache-2.0
painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery
python
## Code Before: """Tests for correctly generated, working setup.""" from os import system from sys import version_info from . import pytest_generate_tests # noqa, pylint: disable=unused-import # pylint: disable=too-few-public-methods class TestTestSetup(object): """ Tests for verifying generated test setups of this cookiecutter, executed several times with different values (test scenarios). """ scenarios = [ ('django', { 'project_slug': 'django-project', 'framework': 'Django', }), # ('flask', { # 'project_slug': 'flask-project', # 'framework': 'Flask', # }), ] # pylint: disable=no-self-use def test_test_setup(self, cookies, project_slug, framework): """ Generate a project and verify the test setup executes successfully. """ py_version = 'py%s%s' % version_info[:2] result = cookies.bake(extra_context={ 'project_slug': project_slug, 'framework': framework, 'tests': 'flake8,pylint,%s,behave' % py_version, }) assert result.exit_code == 0 assert result.exception is None tox_ini = result.project.join('tox.ini') assert tox_ini.isfile() exit_code = system('tox -c %s' % tox_ini) assert exit_code == 0, 'Running tests in generated project fails.' ## Instruction: Make py_version and assertion more readable ## Code After: """Tests for correctly generated, working setup.""" from os import system from sys import version_info from . import pytest_generate_tests # noqa, pylint: disable=unused-import # pylint: disable=too-few-public-methods class TestTestSetup(object): """ Tests for verifying generated test setups of this cookiecutter, executed several times with different values (test scenarios). """ scenarios = [ ('django', { 'project_slug': 'django-project', 'framework': 'Django', }), # ('flask', { # 'project_slug': 'flask-project', # 'framework': 'Flask', # }), ] # pylint: disable=no-self-use def test_test_setup(self, cookies, project_slug, framework): """ Generate a project and verify the test setup executes successfully. """ major, minor = version_info[:2] py_version = 'py%s%s' % (major, minor) result = cookies.bake(extra_context={ 'project_slug': project_slug, 'framework': framework, 'tests': 'flake8,pylint,%s,behave' % py_version, }) assert result.exit_code == 0, \ 'Cookiecutter exits with %(exit_code)s:' \ ' %(exception)s' % result.__dict__ assert result.exception is None tox_ini = result.project.join('tox.ini') assert tox_ini.isfile() exit_code = system('tox -c %s' % tox_ini) assert exit_code == 0, 'Running tests in generated project fails.'
# ... existing code ... """ Generate a project and verify the test setup executes successfully. """ major, minor = version_info[:2] py_version = 'py%s%s' % (major, minor) result = cookies.bake(extra_context={ 'project_slug': project_slug, 'framework': framework, # ... modified code ... 'tests': 'flake8,pylint,%s,behave' % py_version, }) assert result.exit_code == 0, \ 'Cookiecutter exits with %(exit_code)s:' \ ' %(exception)s' % result.__dict__ assert result.exception is None tox_ini = result.project.join('tox.ini') # ... rest of the code ...
96355a918a22e53d0c2ae369aae77b2c7b4b276e
third_party/widevine/cdm/android/widevine_cdm_version.h
third_party/widevine/cdm/android/widevine_cdm_version.h
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef WIDEVINE_CDM_VERSION_H_ #define WIDEVINE_CDM_VERSION_H_ #include "third_party/widevine/cdm/widevine_cdm_common.h" // Indicates that the Widevine CDM is available. #define WIDEVINE_CDM_AVAILABLE // Indicates that AVC1 decoding is available for ISO BMFF CENC. #define WIDEVINE_CDM_AVC1_SUPPORT_AVAILABLE // Indicates that AAC decoding is available for ISO BMFF CENC. #define WIDEVINE_CDM_AAC_SUPPORT_AVAILABLE #endif // WIDEVINE_CDM_VERSION_H_
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef WIDEVINE_CDM_VERSION_H_ #define WIDEVINE_CDM_VERSION_H_ #include "third_party/widevine/cdm/widevine_cdm_common.h" // Indicates that the Widevine CDM is available. #define WIDEVINE_CDM_AVAILABLE #endif // WIDEVINE_CDM_VERSION_H_
Remove obsolete defines from Android CDM file.
Remove obsolete defines from Android CDM file. BUG=349185 Review URL: https://codereview.chromium.org/1000863003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#321407}
C
bsd-3-clause
ltilve/chromium,Chilledheart/chromium,Chilledheart/chromium,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Fireblend/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,ltilve/chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk
c
## Code Before: // Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef WIDEVINE_CDM_VERSION_H_ #define WIDEVINE_CDM_VERSION_H_ #include "third_party/widevine/cdm/widevine_cdm_common.h" // Indicates that the Widevine CDM is available. #define WIDEVINE_CDM_AVAILABLE // Indicates that AVC1 decoding is available for ISO BMFF CENC. #define WIDEVINE_CDM_AVC1_SUPPORT_AVAILABLE // Indicates that AAC decoding is available for ISO BMFF CENC. #define WIDEVINE_CDM_AAC_SUPPORT_AVAILABLE #endif // WIDEVINE_CDM_VERSION_H_ ## Instruction: Remove obsolete defines from Android CDM file. BUG=349185 Review URL: https://codereview.chromium.org/1000863003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#321407} ## Code After: // Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef WIDEVINE_CDM_VERSION_H_ #define WIDEVINE_CDM_VERSION_H_ #include "third_party/widevine/cdm/widevine_cdm_common.h" // Indicates that the Widevine CDM is available. #define WIDEVINE_CDM_AVAILABLE #endif // WIDEVINE_CDM_VERSION_H_
... // Indicates that the Widevine CDM is available. #define WIDEVINE_CDM_AVAILABLE #endif // WIDEVINE_CDM_VERSION_H_ ...
4aad37d8186fab025ba29050620a929c167ca497
pulsar/locks.py
pulsar/locks.py
try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, expect suboptimal Pulsar lock handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path]
try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, skipping experimental lockfile handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path]
Fix misleading message about pylockfile.
Fix misleading message about pylockfile.
Python
apache-2.0
ssorgatem/pulsar,natefoo/pulsar,natefoo/pulsar,ssorgatem/pulsar,galaxyproject/pulsar,galaxyproject/pulsar
python
## Code Before: try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, expect suboptimal Pulsar lock handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path] ## Instruction: Fix misleading message about pylockfile. ## Code After: try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, skipping experimental lockfile handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path]
... import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, skipping experimental lockfile handling." class LockManager(): ...
3275c3b68021331e046728f80eef2f4aa27d8599
buildSrc/src/main/kotlin/buildsrc/convention/android-application.gradle.kts
buildSrc/src/main/kotlin/buildsrc/convention/android-application.gradle.kts
package buildsrc.convention import buildsrc.config.Deps import org.gradle.jvm.tasks.Jar plugins { id("com.android.application") id("org.jetbrains.dokka") id("buildsrc.convention.base") } android { compileSdkVersion = "android-32" lint { abortOnError = false disable += "InvalidPackage" warning += "NewApi" } packagingOptions { resources { excludes += "META-INF/main.kotlin_module" } } defaultConfig { minSdk = 21 targetSdk = 32 } compileOptions { sourceCompatibility = Deps.Versions.jvmTarget targetCompatibility = Deps.Versions.jvmTarget } } val javadocJar by tasks.registering(Jar::class) { from(tasks.dokkaJavadoc) archiveClassifier.set("javadoc") }
package buildsrc.convention import buildsrc.config.Deps plugins { id("com.android.application") id("org.jetbrains.dokka") id("buildsrc.convention.base") } android { compileSdkVersion = "android-32" lint { abortOnError = false disable += "InvalidPackage" warning += "NewApi" } packagingOptions { resources { excludes += "META-INF/main.kotlin_module" } } defaultConfig { minSdk = 21 targetSdk = 32 } compileOptions { sourceCompatibility = Deps.Versions.jvmTarget targetCompatibility = Deps.Versions.jvmTarget } } val javadocJar by tasks.registering(Jar::class) { from(tasks.dokkaJavadoc) archiveClassifier.set("javadoc") }
Remove wrong Jar task import
Remove wrong Jar task import
Kotlin
apache-2.0
mockk/mockk,mockk/mockk,mockk/mockk,mockk/mockk,mockk/mockk,mockk/mockk
kotlin
## Code Before: package buildsrc.convention import buildsrc.config.Deps import org.gradle.jvm.tasks.Jar plugins { id("com.android.application") id("org.jetbrains.dokka") id("buildsrc.convention.base") } android { compileSdkVersion = "android-32" lint { abortOnError = false disable += "InvalidPackage" warning += "NewApi" } packagingOptions { resources { excludes += "META-INF/main.kotlin_module" } } defaultConfig { minSdk = 21 targetSdk = 32 } compileOptions { sourceCompatibility = Deps.Versions.jvmTarget targetCompatibility = Deps.Versions.jvmTarget } } val javadocJar by tasks.registering(Jar::class) { from(tasks.dokkaJavadoc) archiveClassifier.set("javadoc") } ## Instruction: Remove wrong Jar task import ## Code After: package buildsrc.convention import buildsrc.config.Deps plugins { id("com.android.application") id("org.jetbrains.dokka") id("buildsrc.convention.base") } android { compileSdkVersion = "android-32" lint { abortOnError = false disable += "InvalidPackage" warning += "NewApi" } packagingOptions { resources { excludes += "META-INF/main.kotlin_module" } } defaultConfig { minSdk = 21 targetSdk = 32 } compileOptions { sourceCompatibility = Deps.Versions.jvmTarget targetCompatibility = Deps.Versions.jvmTarget } } val javadocJar by tasks.registering(Jar::class) { from(tasks.dokkaJavadoc) archiveClassifier.set("javadoc") }
... package buildsrc.convention import buildsrc.config.Deps plugins { id("com.android.application") ...
e2722385831a0930765d2c4bb78a582d41f4b64b
src/sentry/replays.py
src/sentry/replays.py
from __future__ import absolute_import import socket from httplib import HTTPConnection, HTTPSConnection from urllib import urlencode from urlparse import urlparse class Replayer(object): def __init__(self, url, method, data=None, headers=None): self.url = url self.method = method self.data = data self.headers = headers def replay(self): urlparts = urlparse(self.url) if urlparts.scheme == 'http': conn_cls = HTTPConnection elif urlparts.scheme == 'https': conn_cls = HTTPSConnection else: raise ValueError(self.url) data = self.data if isinstance(data, dict): data = urlencode(data) if urlparts.query: full_url = urlparts.path + '?' + urlparts.query else: full_url = urlparts.path conn = conn_cls(urlparts.netloc) try: conn.request(self.method, full_url, data, self.headers or {}) response = conn.getresponse() except socket.error as e: return { 'status': 'error', 'reason': str(e), } return { 'status': response.status, 'reason': response.reason, 'headers': response.getheaders(), 'body': response.read(), }
from __future__ import absolute_import import requests class Replayer(object): def __init__(self, url, method, data=None, headers=None): self.url = url self.method = method self.data = data self.headers = headers def replay(self): try: response = requests.request( self.method, self.url, data=self.data, headers=self.headers or {} ) except requests.RequestException as e: return { 'status': 'error', 'reason': str(e), } return { 'status': response.status_code, 'reason': response.reason, 'headers': response.headers, 'body': response.content, }
Use requests instead of httplib to do replay
Use requests instead of httplib to do replay
Python
bsd-3-clause
beeftornado/sentry,nicholasserra/sentry,Kryz/sentry,JackDanger/sentry,imankulov/sentry,JamesMura/sentry,zenefits/sentry,kevinlondon/sentry,mvaled/sentry,JamesMura/sentry,ifduyue/sentry,looker/sentry,daevaorn/sentry,fotinakis/sentry,gencer/sentry,looker/sentry,JackDanger/sentry,mvaled/sentry,Natim/sentry,beeftornado/sentry,korealerts1/sentry,imankulov/sentry,zenefits/sentry,jean/sentry,alexm92/sentry,fotinakis/sentry,daevaorn/sentry,beeftornado/sentry,BuildingLink/sentry,ngonzalvez/sentry,BayanGroup/sentry,mvaled/sentry,mitsuhiko/sentry,daevaorn/sentry,ngonzalvez/sentry,looker/sentry,korealerts1/sentry,mvaled/sentry,gencer/sentry,imankulov/sentry,Kryz/sentry,looker/sentry,felixbuenemann/sentry,jean/sentry,mitsuhiko/sentry,fotinakis/sentry,ifduyue/sentry,mvaled/sentry,korealerts1/sentry,kevinlondon/sentry,Natim/sentry,alexm92/sentry,zenefits/sentry,zenefits/sentry,BayanGroup/sentry,jean/sentry,mvaled/sentry,ifduyue/sentry,JamesMura/sentry,kevinlondon/sentry,BuildingLink/sentry,BayanGroup/sentry,JamesMura/sentry,nicholasserra/sentry,felixbuenemann/sentry,Kryz/sentry,BuildingLink/sentry,BuildingLink/sentry,Natim/sentry,BuildingLink/sentry,jean/sentry,JackDanger/sentry,felixbuenemann/sentry,fotinakis/sentry,nicholasserra/sentry,daevaorn/sentry,ngonzalvez/sentry,looker/sentry,alexm92/sentry,gencer/sentry,gencer/sentry,gencer/sentry,zenefits/sentry,JamesMura/sentry,ifduyue/sentry,jean/sentry,ifduyue/sentry
python
## Code Before: from __future__ import absolute_import import socket from httplib import HTTPConnection, HTTPSConnection from urllib import urlencode from urlparse import urlparse class Replayer(object): def __init__(self, url, method, data=None, headers=None): self.url = url self.method = method self.data = data self.headers = headers def replay(self): urlparts = urlparse(self.url) if urlparts.scheme == 'http': conn_cls = HTTPConnection elif urlparts.scheme == 'https': conn_cls = HTTPSConnection else: raise ValueError(self.url) data = self.data if isinstance(data, dict): data = urlencode(data) if urlparts.query: full_url = urlparts.path + '?' + urlparts.query else: full_url = urlparts.path conn = conn_cls(urlparts.netloc) try: conn.request(self.method, full_url, data, self.headers or {}) response = conn.getresponse() except socket.error as e: return { 'status': 'error', 'reason': str(e), } return { 'status': response.status, 'reason': response.reason, 'headers': response.getheaders(), 'body': response.read(), } ## Instruction: Use requests instead of httplib to do replay ## Code After: from __future__ import absolute_import import requests class Replayer(object): def __init__(self, url, method, data=None, headers=None): self.url = url self.method = method self.data = data self.headers = headers def replay(self): try: response = requests.request( self.method, self.url, data=self.data, headers=self.headers or {} ) except requests.RequestException as e: return { 'status': 'error', 'reason': str(e), } return { 'status': response.status_code, 'reason': response.reason, 'headers': response.headers, 'body': response.content, }
# ... existing code ... from __future__ import absolute_import import requests class Replayer(object): # ... modified code ... self.headers = headers def replay(self): try: response = requests.request( self.method, self.url, data=self.data, headers=self.headers or {} ) except requests.RequestException as e: return { 'status': 'error', 'reason': str(e), ... } return { 'status': response.status_code, 'reason': response.reason, 'headers': response.headers, 'body': response.content, } # ... rest of the code ...
c9eb050771c487aae67e0a1f8c69ad011b6aaa90
repomgmt/management/commands/process-build-queue.py
repomgmt/management/commands/process-build-queue.py
from django.core.management.base import BaseCommand from repomgmt.models import BuildNode, BuildRecord import repomgmt.utils class Command(BaseCommand): args = '' help = 'Processes the build queue' def handle(self, **options): if BuildRecord.pending_build_count() > 0: bn = BuildNode.start_new() br = BuildRecord.pick_build(bn) bn.prepare(br) bn.build(br)
from django.core.management.base import BaseCommand from repomgmt.models import BuildNode, BuildRecord class Command(BaseCommand): args = '' help = 'Processes the build queue' def handle(self, **options): if BuildRecord.pending_build_count() > 0: bn = BuildNode.start_new() br = BuildRecord.pick_build(bn) bn.prepare(br) bn.build(br)
Add tailing / to APT_REPO_BASE_URL
Add tailing / to APT_REPO_BASE_URL
Python
apache-2.0
sorenh/python-django-repomgmt,sorenh/python-django-repomgmt
python
## Code Before: from django.core.management.base import BaseCommand from repomgmt.models import BuildNode, BuildRecord import repomgmt.utils class Command(BaseCommand): args = '' help = 'Processes the build queue' def handle(self, **options): if BuildRecord.pending_build_count() > 0: bn = BuildNode.start_new() br = BuildRecord.pick_build(bn) bn.prepare(br) bn.build(br) ## Instruction: Add tailing / to APT_REPO_BASE_URL ## Code After: from django.core.management.base import BaseCommand from repomgmt.models import BuildNode, BuildRecord class Command(BaseCommand): args = '' help = 'Processes the build queue' def handle(self, **options): if BuildRecord.pending_build_count() > 0: bn = BuildNode.start_new() br = BuildRecord.pick_build(bn) bn.prepare(br) bn.build(br)
// ... existing code ... from django.core.management.base import BaseCommand from repomgmt.models import BuildNode, BuildRecord class Command(BaseCommand): args = '' // ... rest of the code ...
b093a40ab8bbae9f7510d126ce1400a475f921d9
service-routing/src/main/java/fi/nls/oskari/routing/RoutingService.java
service-routing/src/main/java/fi/nls/oskari/routing/RoutingService.java
package fi.nls.oskari.routing; import com.ibatis.common.resources.Resources; import com.ibatis.sqlmap.client.SqlMapClient; import com.ibatis.sqlmap.client.SqlMapClientBuilder; import java.io.Reader; public class RoutingService { private SqlMapClient client = null; private static String SQL_MAP_LOCATION = "META-INF/SqlMapConfig.xml"; protected SqlMapClient getSqlMapClient() { if (client != null) { return client; } Reader reader = null; try { reader = Resources.getResourceAsReader(SQL_MAP_LOCATION); client = SqlMapClientBuilder.buildSqlMapClient(reader); return client; } catch (Exception e) { throw new RuntimeException("Failed to retrieve SQL client", e); } finally { if (reader != null) { try { reader.close(); } catch (Exception e) { throw new RuntimeException(e); } } } } public Long hevonen() { try { client = getSqlMapClient(); Long results = (Long)client.queryForObject("Routing.foo"); return results; } catch (Exception e) { System.out.println(e.getMessage()); throw new RuntimeException("Failed to query", e); } } }
package fi.nls.oskari.routing; import com.ibatis.common.resources.Resources; import com.ibatis.sqlmap.client.SqlMapClient; import com.ibatis.sqlmap.client.SqlMapClientBuilder; import java.io.Reader; public class RoutingService { private SqlMapClient client = null; private static String SQL_MAP_LOCATION = "META-INF/SqlMapConfig.xml"; private SqlMapClient getSqlMapClient() { if (client != null) { return client; } Reader reader = null; try { reader = Resources.getResourceAsReader(SQL_MAP_LOCATION); client = SqlMapClientBuilder.buildSqlMapClient(reader); return client; } catch (Exception e) { throw new RuntimeException("Failed to retrieve SQL client", e); } finally { if (reader != null) { try { reader.close(); } catch (Exception e) { throw new RuntimeException(e); } } } } public Long hevonen() { try { SqlMapClient client = getSqlMapClient(); Long results = (Long)client.queryForObject("Routing.foo"); return results; } catch (Exception e) { System.out.println(e.getMessage()); throw new RuntimeException("Failed to query", e); } } }
Use local variable instead of member variable
Use local variable instead of member variable
Java
mit
uhef/Oskari-Routing,uhef/Oskari-Routing,uhef/Oskari-Routing,uhef/Oskari-Routing
java
## Code Before: package fi.nls.oskari.routing; import com.ibatis.common.resources.Resources; import com.ibatis.sqlmap.client.SqlMapClient; import com.ibatis.sqlmap.client.SqlMapClientBuilder; import java.io.Reader; public class RoutingService { private SqlMapClient client = null; private static String SQL_MAP_LOCATION = "META-INF/SqlMapConfig.xml"; protected SqlMapClient getSqlMapClient() { if (client != null) { return client; } Reader reader = null; try { reader = Resources.getResourceAsReader(SQL_MAP_LOCATION); client = SqlMapClientBuilder.buildSqlMapClient(reader); return client; } catch (Exception e) { throw new RuntimeException("Failed to retrieve SQL client", e); } finally { if (reader != null) { try { reader.close(); } catch (Exception e) { throw new RuntimeException(e); } } } } public Long hevonen() { try { client = getSqlMapClient(); Long results = (Long)client.queryForObject("Routing.foo"); return results; } catch (Exception e) { System.out.println(e.getMessage()); throw new RuntimeException("Failed to query", e); } } } ## Instruction: Use local variable instead of member variable ## Code After: package fi.nls.oskari.routing; import com.ibatis.common.resources.Resources; import com.ibatis.sqlmap.client.SqlMapClient; import com.ibatis.sqlmap.client.SqlMapClientBuilder; import java.io.Reader; public class RoutingService { private SqlMapClient client = null; private static String SQL_MAP_LOCATION = "META-INF/SqlMapConfig.xml"; private SqlMapClient getSqlMapClient() { if (client != null) { return client; } Reader reader = null; try { reader = Resources.getResourceAsReader(SQL_MAP_LOCATION); client = SqlMapClientBuilder.buildSqlMapClient(reader); return client; } catch (Exception e) { throw new RuntimeException("Failed to retrieve SQL client", e); } finally { if (reader != null) { try { reader.close(); } catch (Exception e) { throw new RuntimeException(e); } } } } public Long hevonen() { try { SqlMapClient client = getSqlMapClient(); Long results = (Long)client.queryForObject("Routing.foo"); return results; } catch (Exception e) { System.out.println(e.getMessage()); throw new RuntimeException("Failed to query", e); } } }
... private SqlMapClient client = null; private static String SQL_MAP_LOCATION = "META-INF/SqlMapConfig.xml"; private SqlMapClient getSqlMapClient() { if (client != null) { return client; } Reader reader = null; try { ... public Long hevonen() { try { SqlMapClient client = getSqlMapClient(); Long results = (Long)client.queryForObject("Routing.foo"); return results; } catch (Exception e) { ...
69a339c792e2545cbd12c126a5b0865e4cf1e7e5
paystackapi/tests/test_product.py
paystackapi/tests/test_product.py
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product # class TestProduct(BaseTestCase): # @httpretty.activate # def test_valid_create(self): # pass
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product class TestProduct(BaseTestCase): @httpretty.activate def test_product_create(self): """Method defined to test product creation.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Product successfully created"}', status=201, ) response = Product.create( name="Product pypaystack test", description="my test description", price=500000, currency="NGN" ) self.assertTrue(response['status']) @httpretty.activate def test_product_list(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}', status=201, ) response = Product.list() self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.fetch(5499) self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.PUT, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.update(product_id=5499, name="Product pypaystack test", description="my test description", price=500000000, currency="USD" ) self.assertEqual(response['status'], True)
Add test cases for product.
Add test cases for product.
Python
mit
andela-sjames/paystack-python
python
## Code Before: import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product # class TestProduct(BaseTestCase): # @httpretty.activate # def test_valid_create(self): # pass ## Instruction: Add test cases for product. ## Code After: import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product class TestProduct(BaseTestCase): @httpretty.activate def test_product_create(self): """Method defined to test product creation.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Product successfully created"}', status=201, ) response = Product.create( name="Product pypaystack test", description="my test description", price=500000, currency="NGN" ) self.assertTrue(response['status']) @httpretty.activate def test_product_list(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}', status=201, ) response = Product.list() self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.fetch(5499) self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.PUT, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.update(product_id=5499, name="Product pypaystack test", description="my test description", price=500000000, currency="USD" ) self.assertEqual(response['status'], True)
... from paystackapi.product import Product class TestProduct(BaseTestCase): @httpretty.activate def test_product_create(self): """Method defined to test product creation.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Product successfully created"}', status=201, ) response = Product.create( name="Product pypaystack test", description="my test description", price=500000, currency="NGN" ) self.assertTrue(response['status']) @httpretty.activate def test_product_list(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}', status=201, ) response = Product.list() self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.fetch(5499) self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.PUT, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.update(product_id=5499, name="Product pypaystack test", description="my test description", price=500000000, currency="USD" ) self.assertEqual(response['status'], True) ...
a9d4acae16541eef1dd2f5b43f82d64f8f6d3260
factory/src/it/functional/src/test/java/com/google/auto/factory/DependencyInjectionIntegrationTest.java
factory/src/it/functional/src/test/java/com/google/auto/factory/DependencyInjectionIntegrationTest.java
package com.google.auto.factory; import static org.truth0.Truth.ASSERT; import org.junit.Test; import com.google.inject.Guice; import dagger.ObjectGraph; public class DependencyInjectionIntegrationTest { @Test public void daggerInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } @Test public void guiceInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = Guice.createInjector(new GuiceModule()) .getInstance(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } }
package com.google.auto.factory; import static org.truth0.Truth.ASSERT; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import com.google.inject.Guice; import dagger.ObjectGraph; @RunWith(JUnit4.class) public class DependencyInjectionIntegrationTest { @Test public void daggerInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } @Test public void guiceInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = Guice.createInjector(new GuiceModule()) .getInstance(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } }
Mark test with JUnit4 runner
Mark test with JUnit4 runner
Java
apache-2.0
tbroyer/auto,aglne/auto,agilemobiledev/auto,eamonnmcmanus/auto,gnanam336/auto,THEONE10211024/auto,rharter/auto,MaTriXy/auto,google/auto,Eagles2F/auto,THEONE10211024/auto,gk5885/auto,ruhaly/auto,agilemobiledev/auto,cgruber/auto,tbroyer/auto,adriancole/auto,Eagles2F/auto,ruhaly/auto,sitexa/auto,wuqiangxjtu/auto,eamonnmcmanus/auto,google/auto,google/auto,cgruber/auto,adriancole/auto,gnanam336/auto,sitexa/auto,wuqiangxjtu/auto,MaTriXy/auto,aglne/auto,eamonnmcmanus/auto,rharter/auto
java
## Code Before: package com.google.auto.factory; import static org.truth0.Truth.ASSERT; import org.junit.Test; import com.google.inject.Guice; import dagger.ObjectGraph; public class DependencyInjectionIntegrationTest { @Test public void daggerInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } @Test public void guiceInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = Guice.createInjector(new GuiceModule()) .getInstance(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } } ## Instruction: Mark test with JUnit4 runner ## Code After: package com.google.auto.factory; import static org.truth0.Truth.ASSERT; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import com.google.inject.Guice; import dagger.ObjectGraph; @RunWith(JUnit4.class) public class DependencyInjectionIntegrationTest { @Test public void daggerInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } @Test public void guiceInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = Guice.createInjector(new GuiceModule()) .getInstance(FactoryGeneratedFactory.class); FactoryGenerated one = factoryGeneratedFactory.create("A"); FactoryGenerated two = factoryGeneratedFactory.create("B"); ASSERT.that(one.name()).isEqualTo("A"); ASSERT.that(one.dependency()).isNotNull(); ASSERT.that(two.name()).isEqualTo("B"); ASSERT.that(two.dependency()).isNotNull(); ASSERT.that(one.dependency()).isNotEqualTo(two.dependency()); } }
// ... existing code ... import static org.truth0.Truth.ASSERT; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import com.google.inject.Guice; import dagger.ObjectGraph; @RunWith(JUnit4.class) public class DependencyInjectionIntegrationTest { @Test public void daggerInjectedFactory() { FactoryGeneratedFactory factoryGeneratedFactory = // ... rest of the code ...
1da520787717117b0413715f9a6df834f2d9e7e1
press_releases/migrations/0009_auto_20170519_1308.py
press_releases/migrations/0009_auto_20170519_1308.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this item", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this content", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True), ), ]
Change help text wording to follow WorkflowStateMixin
Change help text wording to follow WorkflowStateMixin
Python
mit
ic-labs/django-icekit,ic-labs/icekit-press-releases,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-press-releases
python
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this item", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True), ), ] ## Instruction: Change help text wording to follow WorkflowStateMixin ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this content", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True), ), ]
// ... existing code ... migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this content", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True), ), ] // ... rest of the code ...
bde09206bf308167a11bcb012753d10d845dc810
test_project/blog/models.py
test_project/blog/models.py
from django.db import models from django.contrib.auth.models import User class Entry(models.Model): content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Comment(models.Model): post = models.ForeignKey(Entry, related_name='comments') content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Actor(models.Model): name = models.CharField(max_length=32) class Movie(models.Model): name = models.CharField(max_length=32) actors = models.ManyToManyField(Actor, related_name='movies') score = models.IntegerField(default=0)
from django.db import models from django.contrib.auth.models import User class Entry(models.Model): content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Comment(models.Model): post = models.ForeignKey(Entry, related_name='comments') content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class SmartTag(models.Model): entry = models.ForeignKey(Entry, related_name='smart_tags') name = models.CharField(max_length=32) class Actor(models.Model): name = models.CharField(max_length=32) class Movie(models.Model): name = models.CharField(max_length=32) actors = models.ManyToManyField(Actor, related_name='movies') score = models.IntegerField(default=0)
Create SmartTag model to demonstrate multi-word resource names.
Create SmartTag model to demonstrate multi-word resource names.
Python
bsd-3-clause
juanique/django-chocolate,juanique/django-chocolate,juanique/django-chocolate
python
## Code Before: from django.db import models from django.contrib.auth.models import User class Entry(models.Model): content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Comment(models.Model): post = models.ForeignKey(Entry, related_name='comments') content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Actor(models.Model): name = models.CharField(max_length=32) class Movie(models.Model): name = models.CharField(max_length=32) actors = models.ManyToManyField(Actor, related_name='movies') score = models.IntegerField(default=0) ## Instruction: Create SmartTag model to demonstrate multi-word resource names. ## Code After: from django.db import models from django.contrib.auth.models import User class Entry(models.Model): content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class Comment(models.Model): post = models.ForeignKey(Entry, related_name='comments') content = models.TextField() author = models.ForeignKey(User) created = models.DateTimeField() class SmartTag(models.Model): entry = models.ForeignKey(Entry, related_name='smart_tags') name = models.CharField(max_length=32) class Actor(models.Model): name = models.CharField(max_length=32) class Movie(models.Model): name = models.CharField(max_length=32) actors = models.ManyToManyField(Actor, related_name='movies') score = models.IntegerField(default=0)
# ... existing code ... created = models.DateTimeField() class SmartTag(models.Model): entry = models.ForeignKey(Entry, related_name='smart_tags') name = models.CharField(max_length=32) class Actor(models.Model): name = models.CharField(max_length=32) # ... rest of the code ...
b17104be53389604b4b7f5f109895bdaa6389e43
hic/flow.py
hic/flow.py
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
Add note about flake8 ignore flag.
Add note about flake8 ignore flag.
Python
mit
jbernhard/hic,Duke-QCD/hic
python
## Code Before: from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass ## Instruction: Add note about flake8 ignore flag. ## Code After: from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
// ... existing code ... import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): // ... rest of the code ...
94a55dfc68fcd2352f867b01fc703a202c87f453
troposphere/events.py
troposphere/events.py
from . import AWSObject, AWSProperty class Target(AWSProperty): props = { 'Arn': (basestring, True), 'Id': (basestring, True), 'Input': (basestring, False), 'InputPath': (basestring, False) } class Rule(AWSObject): resource_type = "AWS::Events::Rule" props = { 'Description': (basestring, False), 'EventPattern': (dict, False), 'Name': (basestring, False), 'RoleArn': (basestring, False), 'ScheduleExpression': (basestring, False), 'State': (basestring, False), 'Targets': ([Target], False) }
from . import AWSObject, AWSProperty class Target(AWSProperty): props = { 'Arn': (basestring, True), 'Id': (basestring, True), 'Input': (basestring, False), 'InputPath': (basestring, False), 'RoleArn': (basestring, False), } class Rule(AWSObject): resource_type = "AWS::Events::Rule" props = { 'Description': (basestring, False), 'EventPattern': (dict, False), 'Name': (basestring, False), 'ScheduleExpression': (basestring, False), 'State': (basestring, False), 'Targets': ([Target], False), }
Remove RoleArn from Events::Rule and add to Target property
Remove RoleArn from Events::Rule and add to Target property
Python
bsd-2-clause
pas256/troposphere,pas256/troposphere,ikben/troposphere,7digital/troposphere,ikben/troposphere,cloudtools/troposphere,7digital/troposphere,johnctitus/troposphere,johnctitus/troposphere,cloudtools/troposphere
python
## Code Before: from . import AWSObject, AWSProperty class Target(AWSProperty): props = { 'Arn': (basestring, True), 'Id': (basestring, True), 'Input': (basestring, False), 'InputPath': (basestring, False) } class Rule(AWSObject): resource_type = "AWS::Events::Rule" props = { 'Description': (basestring, False), 'EventPattern': (dict, False), 'Name': (basestring, False), 'RoleArn': (basestring, False), 'ScheduleExpression': (basestring, False), 'State': (basestring, False), 'Targets': ([Target], False) } ## Instruction: Remove RoleArn from Events::Rule and add to Target property ## Code After: from . import AWSObject, AWSProperty class Target(AWSProperty): props = { 'Arn': (basestring, True), 'Id': (basestring, True), 'Input': (basestring, False), 'InputPath': (basestring, False), 'RoleArn': (basestring, False), } class Rule(AWSObject): resource_type = "AWS::Events::Rule" props = { 'Description': (basestring, False), 'EventPattern': (dict, False), 'Name': (basestring, False), 'ScheduleExpression': (basestring, False), 'State': (basestring, False), 'Targets': ([Target], False), }
// ... existing code ... 'Arn': (basestring, True), 'Id': (basestring, True), 'Input': (basestring, False), 'InputPath': (basestring, False), 'RoleArn': (basestring, False), } // ... modified code ... 'Description': (basestring, False), 'EventPattern': (dict, False), 'Name': (basestring, False), 'ScheduleExpression': (basestring, False), 'State': (basestring, False), 'Targets': ([Target], False), } // ... rest of the code ...
a67c59454063a38afcaad7ac791a98bef900d73d
skyve-ejb/src/main/java/router/UxUis.java
skyve-ejb/src/main/java/router/UxUis.java
package router; import org.skyve.metadata.router.UxUi; public class UxUis { public static final UxUi PHONE = UxUi.newPrimeFaces("phone", "editorial", "nova-light"); public static final UxUi TABLET = UxUi.newPrimeFaces("tablet", "editorial", "nova-light"); public static final UxUi DESKTOP = UxUi.newSmartClient("desktop", "Tahoe", "casablanca"); public static final UxUi EXTERNAL = UxUi.newPrimeFaces("external", "editorial", "nova-light"); public static final UxUi STARTUP = UxUi.newPrimeFaces("startup", "editorial", "nova-light"); }
package router; import org.skyve.metadata.router.UxUi; // Only change theme attributes here. // Declare new UxUis in the DefaultUxUiSelector. public class UxUis { public static final UxUi PHONE = UxUi.newPrimeFaces("phone", "editorial", "nova-light"); public static final UxUi TABLET = UxUi.newPrimeFaces("tablet", "editorial", "nova-light"); public static final UxUi DESKTOP = UxUi.newSmartClient("desktop", "Tahoe", "casablanca"); public static final UxUi EXTERNAL = UxUi.newPrimeFaces("external", "editorial", "nova-light"); public static final UxUi STARTUP = UxUi.newPrimeFaces("startup", "editorial", "nova-light"); }
Add comment to UxUi in case of assemble.
Add comment to UxUi in case of assemble.
Java
lgpl-2.1
skyvers/skyve,skyvers/wildcat,skyvers/wildcat,skyvers/wildcat,skyvers/skyve,skyvers/skyve,skyvers/wildcat,skyvers/wildcat,skyvers/skyve,skyvers/skyve,skyvers/skyve,skyvers/skyve
java
## Code Before: package router; import org.skyve.metadata.router.UxUi; public class UxUis { public static final UxUi PHONE = UxUi.newPrimeFaces("phone", "editorial", "nova-light"); public static final UxUi TABLET = UxUi.newPrimeFaces("tablet", "editorial", "nova-light"); public static final UxUi DESKTOP = UxUi.newSmartClient("desktop", "Tahoe", "casablanca"); public static final UxUi EXTERNAL = UxUi.newPrimeFaces("external", "editorial", "nova-light"); public static final UxUi STARTUP = UxUi.newPrimeFaces("startup", "editorial", "nova-light"); } ## Instruction: Add comment to UxUi in case of assemble. ## Code After: package router; import org.skyve.metadata.router.UxUi; // Only change theme attributes here. // Declare new UxUis in the DefaultUxUiSelector. public class UxUis { public static final UxUi PHONE = UxUi.newPrimeFaces("phone", "editorial", "nova-light"); public static final UxUi TABLET = UxUi.newPrimeFaces("tablet", "editorial", "nova-light"); public static final UxUi DESKTOP = UxUi.newSmartClient("desktop", "Tahoe", "casablanca"); public static final UxUi EXTERNAL = UxUi.newPrimeFaces("external", "editorial", "nova-light"); public static final UxUi STARTUP = UxUi.newPrimeFaces("startup", "editorial", "nova-light"); }
# ... existing code ... import org.skyve.metadata.router.UxUi; // Only change theme attributes here. // Declare new UxUis in the DefaultUxUiSelector. public class UxUis { public static final UxUi PHONE = UxUi.newPrimeFaces("phone", "editorial", "nova-light"); public static final UxUi TABLET = UxUi.newPrimeFaces("tablet", "editorial", "nova-light"); # ... rest of the code ...
8a580e79156666d46fa59aae21fecb05746b25e2
src/platform.h
src/platform.h
/** * machina * * Copyright (c) 2011, drmats * All rights reserved. * * https://github.com/drmats/machina */ #ifndef __PLATFORM_H_ #define __PLATFORM_H_ 1 #if defined(linux) || defined(__linux) || defined(__linux__) #undef __LINUX__ #define __LINUX__ 1 #endif #if defined(WIN32) || defined(_WIN32) #undef __WIN32__ #define __WIN32__ 1 #endif #ifdef __WIN32__ #pragma warning( disable : 4290 ) #endif #endif
/** * machina * * Copyright (c) 2011, drmats * All rights reserved. * * https://github.com/drmats/machina */ #ifndef __PLATFORM_H_ #define __PLATFORM_H_ 1 #if defined(linux) || defined(__linux) || defined(__linux__) #undef __LINUX__ #define __LINUX__ 1 #endif #if defined(WIN32) || defined(_WIN32) #undef __WIN32__ #define __WIN32__ 1 #endif #if defined(__WIN32__) && defined(_MSC_VER) #pragma warning( disable : 4290 ) #endif #endif
Disable function exception specification warning only in cl (Visual Studio).
Disable function exception specification warning only in cl (Visual Studio).
C
bsd-2-clause
drmats/machina
c
## Code Before: /** * machina * * Copyright (c) 2011, drmats * All rights reserved. * * https://github.com/drmats/machina */ #ifndef __PLATFORM_H_ #define __PLATFORM_H_ 1 #if defined(linux) || defined(__linux) || defined(__linux__) #undef __LINUX__ #define __LINUX__ 1 #endif #if defined(WIN32) || defined(_WIN32) #undef __WIN32__ #define __WIN32__ 1 #endif #ifdef __WIN32__ #pragma warning( disable : 4290 ) #endif #endif ## Instruction: Disable function exception specification warning only in cl (Visual Studio). ## Code After: /** * machina * * Copyright (c) 2011, drmats * All rights reserved. * * https://github.com/drmats/machina */ #ifndef __PLATFORM_H_ #define __PLATFORM_H_ 1 #if defined(linux) || defined(__linux) || defined(__linux__) #undef __LINUX__ #define __LINUX__ 1 #endif #if defined(WIN32) || defined(_WIN32) #undef __WIN32__ #define __WIN32__ 1 #endif #if defined(__WIN32__) && defined(_MSC_VER) #pragma warning( disable : 4290 ) #endif #endif
... #endif #if defined(__WIN32__) && defined(_MSC_VER) #pragma warning( disable : 4290 ) #endif ...
24a4ae1ea31b8b239c63a6a432151b663aed6ba4
test/src/main/java/model/Customer.java
test/src/main/java/model/Customer.java
package model; import java.util.Set; import javax.persistence.Entity; import javax.persistence.OneToMany; /** * @author Emmanuel Bernard */ @Entity public class Customer extends User { private Set<Order> orders; public Set<Order> getOrders() { return orders; } @OneToMany public void setOrders(Set<Order> orders) { this.orders = orders; } }
package model; import java.util.Set; import javax.persistence.Entity; import javax.persistence.OneToMany; /** * @author Emmanuel Bernard */ @Entity public class Customer extends User { private Set<Order> orders; private String nonPersistent; public Set<Order> getOrders() { return orders; } @OneToMany public void setOrders(Set<Order> orders) { this.orders = orders; } }
Support for access type for entity and sub entity
Support for access type for entity and sub entity
Java
apache-2.0
hibernate/hibernate-metamodelgen,hibernate/hibernate-metamodelgen
java
## Code Before: package model; import java.util.Set; import javax.persistence.Entity; import javax.persistence.OneToMany; /** * @author Emmanuel Bernard */ @Entity public class Customer extends User { private Set<Order> orders; public Set<Order> getOrders() { return orders; } @OneToMany public void setOrders(Set<Order> orders) { this.orders = orders; } } ## Instruction: Support for access type for entity and sub entity ## Code After: package model; import java.util.Set; import javax.persistence.Entity; import javax.persistence.OneToMany; /** * @author Emmanuel Bernard */ @Entity public class Customer extends User { private Set<Order> orders; private String nonPersistent; public Set<Order> getOrders() { return orders; } @OneToMany public void setOrders(Set<Order> orders) { this.orders = orders; } }
# ... existing code ... @Entity public class Customer extends User { private Set<Order> orders; private String nonPersistent; public Set<Order> getOrders() { return orders; # ... rest of the code ...
3609df9044fd72008234bae9145487f315096fcd
hcalendar/__init__.py
hcalendar/__init__.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ python-hcalendar is a basic hCalendar parser """ __version_info__ = { 'major': 0, 'minor': 2, 'micro': 0, 'releaselevel': 'final', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version() try: from .hcalendar import hCalendar except ImportError: pass __all__ = ['hCalendar']
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = { 'major': 0, 'minor': 2, 'micro': 0, 'releaselevel': 'final', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version() try: from .hcalendar import hCalendar except ImportError: pass __all__ = ['hCalendar']
Fix hcalendar module __doc__ missing
Fix hcalendar module __doc__ missing
Python
mit
mback2k/python-hcalendar
python
## Code Before: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ python-hcalendar is a basic hCalendar parser """ __version_info__ = { 'major': 0, 'minor': 2, 'micro': 0, 'releaselevel': 'final', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version() try: from .hcalendar import hCalendar except ImportError: pass __all__ = ['hCalendar'] ## Instruction: Fix hcalendar module __doc__ missing ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = { 'major': 0, 'minor': 2, 'micro': 0, 'releaselevel': 'final', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version() try: from .hcalendar import hCalendar except ImportError: pass __all__ = ['hCalendar']
... from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = { 'major': 0, ...
c07234bb3142df96dc9e02a236975bc3de2415cc
nailgun/nailgun/test/test_plugin.py
nailgun/nailgun/test/test_plugin.py
from nailgun.test.base import BaseHandlers class TestPluginStateMachine(BaseHandlers): def test_attrs_creation(self): pass
from nailgun.test.base import BaseHandlers from nailgun.plugin.process import get_queue, PluginProcessor from nailgun.api.models import Task class TestPluginProcess(BaseHandlers): def setUp(self): super(TestPluginProcess, self).setUp() self.plugin_processor = PluginProcessor() self.plugin_processor.start() def tearDown(self): super(TestPluginProcess, self).tearDown() self.plugin_processor.terminate() def test_task_set_to_error_when_exception_raised(self): queue = get_queue() task = Task(name='install_plugin', cache={'plugin_id': -1}) self.env.db.add(task) self.env.db.commit() queue.put(task.uuid) def check_task_status_is_error(): self.env.db.refresh(task) return task.status == 'error' self.env.wait_for_true(check_task_status_is_error, timeout=2) self.assertEquals(task.progress, 100)
Implement plugin test on exception handling
Implement plugin test on exception handling
Python
apache-2.0
SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,zhaochao/fuel-main,zhaochao/fuel-main,huntxu/fuel-main,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-main,SmartInfrastructures/fuel-main-dev,huntxu/fuel-web,teselkin/fuel-main,ddepaoli3/fuel-main-dev,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SergK/fuel-main,dancn/fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SergK/fuel-main,zhaochao/fuel-web,dancn/fuel-main-dev,nebril/fuel-web,dancn/fuel-main-dev,AnselZhangGit/fuel-main,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,nebril/fuel-web,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,AnselZhangGit/fuel-main,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-main,SergK/fuel-main,prmtl/fuel-web,zhaochao/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,zhaochao/fuel-web,SmartInfrastructures/fuel-web-dev,koder-ua/nailgun-fcert,koder-ua/nailgun-fcert,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,zhaochao/fuel-web,teselkin/fuel-main,stackforge/fuel-main,prmtl/fuel-web,AnselZhangGit/fuel-main,zhaochao/fuel-main,eayunstack/fuel-web,eayunstack/fuel-main,koder-ua/nailgun-fcert,stackforge/fuel-web,nebril/fuel-web,stackforge/fuel-web,zhaochao/fuel-main,ddepaoli3/fuel-main-dev,Fiware/ops.Fuel-main-dev,prmtl/fuel-web,SmartInfrastructures/fuel-main-dev,Fiware/ops.Fuel-main-dev,zhaochao/fuel-main,koder-ua/nailgun-fcert,huntxu/fuel-web,dancn/fuel-main-dev,stackforge/fuel-main,eayunstack/fuel-web,prmtl/fuel-web,stackforge/fuel-web,huntxu/fuel-web,huntxu/fuel-main,Fiware/ops.Fuel-main-dev,teselkin/fuel-main,nebril/fuel-web,eayunstack/fuel-web,stackforge/fuel-main
python
## Code Before: from nailgun.test.base import BaseHandlers class TestPluginStateMachine(BaseHandlers): def test_attrs_creation(self): pass ## Instruction: Implement plugin test on exception handling ## Code After: from nailgun.test.base import BaseHandlers from nailgun.plugin.process import get_queue, PluginProcessor from nailgun.api.models import Task class TestPluginProcess(BaseHandlers): def setUp(self): super(TestPluginProcess, self).setUp() self.plugin_processor = PluginProcessor() self.plugin_processor.start() def tearDown(self): super(TestPluginProcess, self).tearDown() self.plugin_processor.terminate() def test_task_set_to_error_when_exception_raised(self): queue = get_queue() task = Task(name='install_plugin', cache={'plugin_id': -1}) self.env.db.add(task) self.env.db.commit() queue.put(task.uuid) def check_task_status_is_error(): self.env.db.refresh(task) return task.status == 'error' self.env.wait_for_true(check_task_status_is_error, timeout=2) self.assertEquals(task.progress, 100)
// ... existing code ... from nailgun.test.base import BaseHandlers from nailgun.plugin.process import get_queue, PluginProcessor from nailgun.api.models import Task class TestPluginProcess(BaseHandlers): def setUp(self): super(TestPluginProcess, self).setUp() self.plugin_processor = PluginProcessor() self.plugin_processor.start() def tearDown(self): super(TestPluginProcess, self).tearDown() self.plugin_processor.terminate() def test_task_set_to_error_when_exception_raised(self): queue = get_queue() task = Task(name='install_plugin', cache={'plugin_id': -1}) self.env.db.add(task) self.env.db.commit() queue.put(task.uuid) def check_task_status_is_error(): self.env.db.refresh(task) return task.status == 'error' self.env.wait_for_true(check_task_status_is_error, timeout=2) self.assertEquals(task.progress, 100) // ... rest of the code ...
d33a624fa6aedb93ae43ba1d2c0f6a76d90ff4a6
foldermd5sums.py
foldermd5sums.py
import json import os import sys import hashlib def get_md5sums(directory): md5sums = [] for filename in os.listdir(directory): md5 = hashlib.md5() with open(os.path.join(directory, filename), 'rb') as fp: for chunk in iter(lambda: fp.read(128 * md5.block_size), b''): md5.update(chunk) md5hash = md5.hexdigest() md5sums.append((filename, md5hash)) return md5sums if __name__ == '__main__': if len(sys.argv) < 3: print('Usage: ' + sys.argv[0] + ' input_directory output.json') sys.exit(1) directory = sys.argv[1] if not os.path.exists(directory): print('Directory does not exist!') sys.exit(1) output_json = sys.argv[2] md5sums = get_md5sums(directory) with open(output_json, 'w') as fp: json.dump(md5sums, fp, indent=0)
import json import os import sys import hashlib def get_relative_filepaths(base_directory): """ Return a list of file paths without the base_directory prefix""" file_list = [] for root, subFolders, files in os.walk('Data'): relative_path="/".join(root.split('/')[1:]) for file in files: file_list.append(os.path.join(relative_path,file)) return file_list def get_md5sums(base_directory): md5sums = [] for filename in get_relative_filepaths(base_directory): md5 = hashlib.md5() full_filepath = os.path.join(base_directory, filename) with open(full_filepath, 'rb') as fp: for chunk in iter(lambda: fp.read(128 * md5.block_size), b''): md5.update(chunk) md5hash = md5.hexdigest() md5sums.append((filename, md5hash)) return md5sums if __name__ == '__main__': if len(sys.argv) < 3: print('Usage: ' + sys.argv[0] + ' input_directory output.json') sys.exit(1) directory = sys.argv[1] if not os.path.exists(directory): print('Directory does not exist!') sys.exit(1) output_json = sys.argv[2] md5sums = get_md5sums(directory) with open(output_json, 'w') as fp: json.dump(md5sums, fp, indent=0)
Allow directory of files to be indexed
ENH: Allow directory of files to be indexed In the Data directory, there may be sub-directories of files that need to be kept separate, but all of them need to be indexed.
Python
apache-2.0
zivy/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,zivy/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks,zivy/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks
python
## Code Before: import json import os import sys import hashlib def get_md5sums(directory): md5sums = [] for filename in os.listdir(directory): md5 = hashlib.md5() with open(os.path.join(directory, filename), 'rb') as fp: for chunk in iter(lambda: fp.read(128 * md5.block_size), b''): md5.update(chunk) md5hash = md5.hexdigest() md5sums.append((filename, md5hash)) return md5sums if __name__ == '__main__': if len(sys.argv) < 3: print('Usage: ' + sys.argv[0] + ' input_directory output.json') sys.exit(1) directory = sys.argv[1] if not os.path.exists(directory): print('Directory does not exist!') sys.exit(1) output_json = sys.argv[2] md5sums = get_md5sums(directory) with open(output_json, 'w') as fp: json.dump(md5sums, fp, indent=0) ## Instruction: ENH: Allow directory of files to be indexed In the Data directory, there may be sub-directories of files that need to be kept separate, but all of them need to be indexed. ## Code After: import json import os import sys import hashlib def get_relative_filepaths(base_directory): """ Return a list of file paths without the base_directory prefix""" file_list = [] for root, subFolders, files in os.walk('Data'): relative_path="/".join(root.split('/')[1:]) for file in files: file_list.append(os.path.join(relative_path,file)) return file_list def get_md5sums(base_directory): md5sums = [] for filename in get_relative_filepaths(base_directory): md5 = hashlib.md5() full_filepath = os.path.join(base_directory, filename) with open(full_filepath, 'rb') as fp: for chunk in iter(lambda: fp.read(128 * md5.block_size), b''): md5.update(chunk) md5hash = md5.hexdigest() md5sums.append((filename, md5hash)) return md5sums if __name__ == '__main__': if len(sys.argv) < 3: print('Usage: ' + sys.argv[0] + ' input_directory output.json') sys.exit(1) directory = sys.argv[1] if not os.path.exists(directory): print('Directory does not exist!') sys.exit(1) output_json = sys.argv[2] md5sums = get_md5sums(directory) with open(output_json, 'w') as fp: json.dump(md5sums, fp, indent=0)
# ... existing code ... import sys import hashlib def get_relative_filepaths(base_directory): """ Return a list of file paths without the base_directory prefix""" file_list = [] for root, subFolders, files in os.walk('Data'): relative_path="/".join(root.split('/')[1:]) for file in files: file_list.append(os.path.join(relative_path,file)) return file_list def get_md5sums(base_directory): md5sums = [] for filename in get_relative_filepaths(base_directory): md5 = hashlib.md5() full_filepath = os.path.join(base_directory, filename) with open(full_filepath, 'rb') as fp: for chunk in iter(lambda: fp.read(128 * md5.block_size), b''): md5.update(chunk) md5hash = md5.hexdigest() md5sums.append((filename, md5hash)) return md5sums if __name__ == '__main__': # ... rest of the code ...
d1a2a4c2ee7fda2bfde369bb6311719e72c75a3d
corehq/blobs/tasks.py
corehq/blobs/tasks.py
from __future__ import absolute_import from datetime import datetime from celery.task import periodic_task from celery.schedules import crontab from corehq.util.datadog.gauges import datadog_counter from corehq.blobs.models import BlobExpiration from corehq.blobs import get_blob_db @periodic_task(run_every=crontab(minute=0, hour='0,12')) def delete_expired_blobs(): blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False) db = get_blob_db() paths = [] bytes_deleted = 0 for blob_expiration in blob_expirations: paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket)) bytes_deleted += blob_expiration.length db.bulk_delete(paths) blob_expirations.update(deleted=True) datadog_counter( 'commcare.temp_blobs.bytes_deleted', value=bytes_deleted, ) return bytes_deleted def _utcnow(): return datetime.utcnow()
from __future__ import absolute_import from datetime import datetime from celery.task import periodic_task from celery.schedules import crontab from corehq.util.datadog.gauges import datadog_counter from corehq.blobs.models import BlobExpiration from corehq.blobs import get_blob_db @periodic_task(run_every=crontab(minute=0, hour='0,12')) def delete_expired_blobs(): blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False) db = get_blob_db() total_bytes_deleted = 0 while blob_expirations.exists(): paths = [] deleted_ids = [] bytes_deleted = 0 for blob_expiration in blob_expirations[:1000]: paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket)) deleted_ids.append(blob_expiration.id) bytes_deleted += blob_expiration.length db.bulk_delete(paths) BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True) datadog_counter( 'commcare.temp_blobs.bytes_deleted', value=bytes_deleted, ) total_bytes_deleted += bytes_deleted return total_bytes_deleted def _utcnow(): return datetime.utcnow()
Delete expired blobs in batches
Delete expired blobs in batches
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
python
## Code Before: from __future__ import absolute_import from datetime import datetime from celery.task import periodic_task from celery.schedules import crontab from corehq.util.datadog.gauges import datadog_counter from corehq.blobs.models import BlobExpiration from corehq.blobs import get_blob_db @periodic_task(run_every=crontab(minute=0, hour='0,12')) def delete_expired_blobs(): blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False) db = get_blob_db() paths = [] bytes_deleted = 0 for blob_expiration in blob_expirations: paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket)) bytes_deleted += blob_expiration.length db.bulk_delete(paths) blob_expirations.update(deleted=True) datadog_counter( 'commcare.temp_blobs.bytes_deleted', value=bytes_deleted, ) return bytes_deleted def _utcnow(): return datetime.utcnow() ## Instruction: Delete expired blobs in batches ## Code After: from __future__ import absolute_import from datetime import datetime from celery.task import periodic_task from celery.schedules import crontab from corehq.util.datadog.gauges import datadog_counter from corehq.blobs.models import BlobExpiration from corehq.blobs import get_blob_db @periodic_task(run_every=crontab(minute=0, hour='0,12')) def delete_expired_blobs(): blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False) db = get_blob_db() total_bytes_deleted = 0 while blob_expirations.exists(): paths = [] deleted_ids = [] bytes_deleted = 0 for blob_expiration in blob_expirations[:1000]: paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket)) deleted_ids.append(blob_expiration.id) bytes_deleted += blob_expiration.length db.bulk_delete(paths) BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True) datadog_counter( 'commcare.temp_blobs.bytes_deleted', value=bytes_deleted, ) total_bytes_deleted += bytes_deleted return total_bytes_deleted def _utcnow(): return datetime.utcnow()
// ... existing code ... blob_expirations = BlobExpiration.objects.filter(expires_on__lt=_utcnow(), deleted=False) db = get_blob_db() total_bytes_deleted = 0 while blob_expirations.exists(): paths = [] deleted_ids = [] bytes_deleted = 0 for blob_expiration in blob_expirations[:1000]: paths.append(db.get_path(blob_expiration.identifier, blob_expiration.bucket)) deleted_ids.append(blob_expiration.id) bytes_deleted += blob_expiration.length db.bulk_delete(paths) BlobExpiration.objects.filter(id__in=deleted_ids).update(deleted=True) datadog_counter( 'commcare.temp_blobs.bytes_deleted', value=bytes_deleted, ) total_bytes_deleted += bytes_deleted return total_bytes_deleted def _utcnow(): // ... rest of the code ...
7517fc46387ee998ab3b517ca38e8c003c431a5d
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup config = { 'description': """MiniCPS is a lightweight simulator for accurate network traffic in an industrial control system, with basic support for physical layer interaction.""", 'author': 'scy-phy', 'url': 'https://github.com/scy-phy/minicps', 'download_url': 'https://github.com/scy-phy/minicps', 'author email': '[email protected]', 'version': '0.1.0', 'install_requires': [ 'cpppo', 'networkx', 'matplotlib', 'nose', 'nose-cover3', ], 'package': ['minicps'], 'scripts': [], 'name': 'minicps' } setup(**config)
try: from setuptools import setup except ImportError: from distutils.core import setup config = { 'description': """MiniCPS is a lightweight simulator for accurate network traffic in an industrial control system, with basic support for physical layer interaction.""", 'author': 'scy-phy', 'url': 'https://github.com/scy-phy/minicps', 'download_url': 'https://github.com/scy-phy/minicps', 'author email': '[email protected]', 'version': '1.0.0', 'install_requires': [ 'cpppo', 'nose', 'coverage', ], 'package': ['minicps'], 'scripts': [], 'name': 'minicps' } setup(**config)
Update version, email and requirements
Update version, email and requirements [ci skip]
Python
mit
remmihsorp/minicps,scy-phy/minicps,scy-phy/minicps,remmihsorp/minicps
python
## Code Before: try: from setuptools import setup except ImportError: from distutils.core import setup config = { 'description': """MiniCPS is a lightweight simulator for accurate network traffic in an industrial control system, with basic support for physical layer interaction.""", 'author': 'scy-phy', 'url': 'https://github.com/scy-phy/minicps', 'download_url': 'https://github.com/scy-phy/minicps', 'author email': '[email protected]', 'version': '0.1.0', 'install_requires': [ 'cpppo', 'networkx', 'matplotlib', 'nose', 'nose-cover3', ], 'package': ['minicps'], 'scripts': [], 'name': 'minicps' } setup(**config) ## Instruction: Update version, email and requirements [ci skip] ## Code After: try: from setuptools import setup except ImportError: from distutils.core import setup config = { 'description': """MiniCPS is a lightweight simulator for accurate network traffic in an industrial control system, with basic support for physical layer interaction.""", 'author': 'scy-phy', 'url': 'https://github.com/scy-phy/minicps', 'download_url': 'https://github.com/scy-phy/minicps', 'author email': '[email protected]', 'version': '1.0.0', 'install_requires': [ 'cpppo', 'nose', 'coverage', ], 'package': ['minicps'], 'scripts': [], 'name': 'minicps' } setup(**config)
// ... existing code ... 'download_url': 'https://github.com/scy-phy/minicps', 'author email': '[email protected]', 'version': '1.0.0', 'install_requires': [ 'cpppo', 'nose', 'coverage', ], 'package': ['minicps'], // ... rest of the code ...
25e7b4a2e297e9944b5065851c6e65eb40b11bcd
scripts/examples/OpenMV/99-Tests/unittests.py
scripts/examples/OpenMV/99-Tests/unittests.py
import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, passed): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + ("PASSED" if passed == True else "FAILED")) for test in sorted(os.listdir(SCRIPT_DIR)): if test.endswith(".py"): test_passed = True test_path = "/".join((SCRIPT_DIR, test)) try: exec(open(test_path).read()) gc.collect() if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_passed = False print_result(test, test_passed) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n")
import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, result): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + result) for test in sorted(os.listdir(SCRIPT_DIR)): if test.endswith(".py"): test_result = "PASSED" test_path = "/".join((SCRIPT_DIR, test)) try: exec(open(test_path).read()) gc.collect() if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_result = "DISABLED" if "unavailable" in str(e) else "FAILED" print_result(test, test_result) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n")
Update unittest to ignore disabled functions.
Update unittest to ignore disabled functions.
Python
mit
kwagyeman/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,openmv/openmv,iabdalkader/openmv,openmv/openmv
python
## Code Before: import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, passed): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + ("PASSED" if passed == True else "FAILED")) for test in sorted(os.listdir(SCRIPT_DIR)): if test.endswith(".py"): test_passed = True test_path = "/".join((SCRIPT_DIR, test)) try: exec(open(test_path).read()) gc.collect() if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_passed = False print_result(test, test_passed) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n") ## Instruction: Update unittest to ignore disabled functions. ## Code After: import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, result): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + result) for test in sorted(os.listdir(SCRIPT_DIR)): if test.endswith(".py"): test_result = "PASSED" test_path = "/".join((SCRIPT_DIR, test)) try: exec(open(test_path).read()) gc.collect() if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_result = "DISABLED" if "unavailable" in str(e) else "FAILED" print_result(test, test_result) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n")
... print("") test_failed = False def print_result(test, result): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + result) for test in sorted(os.listdir(SCRIPT_DIR)): if test.endswith(".py"): test_result = "PASSED" test_path = "/".join((SCRIPT_DIR, test)) try: exec(open(test_path).read()) ... raise Exception() except Exception as e: test_failed = True test_result = "DISABLED" if "unavailable" in str(e) else "FAILED" print_result(test, test_result) if test_failed: print("\nSome tests have FAILED!!!\n\n") ...
849552b1a2afdd89552e7c0395fc7be1786d5cbc
pybossa/auth/user.py
pybossa/auth/user.py
from flask.ext.login import current_user def create(user=None): if current_user.is_authenticated(): if current_user.admin: return True else: return False else: return False def read(user=None): return True def update(user): return create(user) def delete(user): return update(user)
from flask.ext.login import current_user def create(user=None): # pragma: no cover if current_user.is_authenticated(): if current_user.admin: return True else: return False else: return False def read(user=None): # pragma: no cover return True def update(user): # pragma: no cover return create(user) def delete(user): # pragma: no cover return update(user)
Exclude it from coverage as these permissions are not used yet.
Exclude it from coverage as these permissions are not used yet.
Python
agpl-3.0
PyBossa/pybossa,PyBossa/pybossa,CulturePlex/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,stefanhahmann/pybossa,geotagx/pybossa,geotagx/pybossa,CulturePlex/pybossa,OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,proyectos-analizo-info/pybossa-analizo-info,jean/pybossa,CulturePlex/pybossa,inteligencia-coletiva-lsd/pybossa,harihpr/tweetclickers,Scifabric/pybossa,OpenNewsLabs/pybossa
python
## Code Before: from flask.ext.login import current_user def create(user=None): if current_user.is_authenticated(): if current_user.admin: return True else: return False else: return False def read(user=None): return True def update(user): return create(user) def delete(user): return update(user) ## Instruction: Exclude it from coverage as these permissions are not used yet. ## Code After: from flask.ext.login import current_user def create(user=None): # pragma: no cover if current_user.is_authenticated(): if current_user.admin: return True else: return False else: return False def read(user=None): # pragma: no cover return True def update(user): # pragma: no cover return create(user) def delete(user): # pragma: no cover return update(user)
# ... existing code ... from flask.ext.login import current_user def create(user=None): # pragma: no cover if current_user.is_authenticated(): if current_user.admin: return True # ... modified code ... return False def read(user=None): # pragma: no cover return True def update(user): # pragma: no cover return create(user) def delete(user): # pragma: no cover return update(user) # ... rest of the code ...
83d7e1431024cd18faae6cba6543c5c0ae97e5af
proxy/src/main/java/com/wavefront/agent/DisableGZIPEncodingInterceptor.java
proxy/src/main/java/com/wavefront/agent/DisableGZIPEncodingInterceptor.java
package com.wavefront.agent; import org.jboss.resteasy.resteasy_jaxrs.i18n.LogMessages; import java.io.IOException; import javax.ws.rs.WebApplicationException; import javax.ws.rs.ext.WriterInterceptor; import javax.ws.rs.ext.WriterInterceptorContext; /** * This RESTEasy interceptor allows disabling GZIP compression even for methods annotated with @GZIP by removing the * Content-Encoding header. * RESTEasy always adds "Content-Encoding: gzip" header when it encounters @GZIP annotation, but if the request body * is actually sent uncompressed, it violates section 3.1.2.2 of RFC7231. * * Created by [email protected] on 6/9/17. */ public class DisableGZIPEncodingInterceptor implements WriterInterceptor { public DisableGZIPEncodingInterceptor() { } public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { LogMessages.LOGGER.debugf("Interceptor : %s, Method : aroundWriteTo", this.getClass().getName()); Object encoding = context.getHeaders().getFirst("Content-Encoding"); if(encoding != null && encoding.toString().equalsIgnoreCase("gzip")) { context.getHeaders().remove("Content-Encoding"); } context.proceed(); } }
package com.wavefront.agent; import java.io.IOException; import java.util.logging.Logger; import javax.ws.rs.WebApplicationException; import javax.ws.rs.ext.WriterInterceptor; import javax.ws.rs.ext.WriterInterceptorContext; /** * This RESTEasy interceptor allows disabling GZIP compression even for methods annotated with @GZIP by removing the * Content-Encoding header. * RESTEasy always adds "Content-Encoding: gzip" header when it encounters @GZIP annotation, but if the request body * is actually sent uncompressed, it violates section 3.1.2.2 of RFC7231. * * Created by [email protected] on 6/9/17. */ public class DisableGZIPEncodingInterceptor implements WriterInterceptor { private static final Logger logger = Logger.getLogger(DisableGZIPEncodingInterceptor.class.getCanonicalName()); public DisableGZIPEncodingInterceptor() { } public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { logger.fine("Interceptor : " + this.getClass().getName() + ", Method : aroundWriteTo"); Object encoding = context.getHeaders().getFirst("Content-Encoding"); if (encoding != null && encoding.toString().equalsIgnoreCase("gzip")) { context.getHeaders().remove("Content-Encoding"); } context.proceed(); } }
Use proxy logger + fix formatting
Use proxy logger + fix formatting
Java
apache-2.0
moribellamy/java,moribellamy/java,moribellamy/java
java
## Code Before: package com.wavefront.agent; import org.jboss.resteasy.resteasy_jaxrs.i18n.LogMessages; import java.io.IOException; import javax.ws.rs.WebApplicationException; import javax.ws.rs.ext.WriterInterceptor; import javax.ws.rs.ext.WriterInterceptorContext; /** * This RESTEasy interceptor allows disabling GZIP compression even for methods annotated with @GZIP by removing the * Content-Encoding header. * RESTEasy always adds "Content-Encoding: gzip" header when it encounters @GZIP annotation, but if the request body * is actually sent uncompressed, it violates section 3.1.2.2 of RFC7231. * * Created by [email protected] on 6/9/17. */ public class DisableGZIPEncodingInterceptor implements WriterInterceptor { public DisableGZIPEncodingInterceptor() { } public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { LogMessages.LOGGER.debugf("Interceptor : %s, Method : aroundWriteTo", this.getClass().getName()); Object encoding = context.getHeaders().getFirst("Content-Encoding"); if(encoding != null && encoding.toString().equalsIgnoreCase("gzip")) { context.getHeaders().remove("Content-Encoding"); } context.proceed(); } } ## Instruction: Use proxy logger + fix formatting ## Code After: package com.wavefront.agent; import java.io.IOException; import java.util.logging.Logger; import javax.ws.rs.WebApplicationException; import javax.ws.rs.ext.WriterInterceptor; import javax.ws.rs.ext.WriterInterceptorContext; /** * This RESTEasy interceptor allows disabling GZIP compression even for methods annotated with @GZIP by removing the * Content-Encoding header. * RESTEasy always adds "Content-Encoding: gzip" header when it encounters @GZIP annotation, but if the request body * is actually sent uncompressed, it violates section 3.1.2.2 of RFC7231. * * Created by [email protected] on 6/9/17. */ public class DisableGZIPEncodingInterceptor implements WriterInterceptor { private static final Logger logger = Logger.getLogger(DisableGZIPEncodingInterceptor.class.getCanonicalName()); public DisableGZIPEncodingInterceptor() { } public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { logger.fine("Interceptor : " + this.getClass().getName() + ", Method : aroundWriteTo"); Object encoding = context.getHeaders().getFirst("Content-Encoding"); if (encoding != null && encoding.toString().equalsIgnoreCase("gzip")) { context.getHeaders().remove("Content-Encoding"); } context.proceed(); } }
// ... existing code ... package com.wavefront.agent; import java.io.IOException; import java.util.logging.Logger; import javax.ws.rs.WebApplicationException; import javax.ws.rs.ext.WriterInterceptor; // ... modified code ... * Created by [email protected] on 6/9/17. */ public class DisableGZIPEncodingInterceptor implements WriterInterceptor { private static final Logger logger = Logger.getLogger(DisableGZIPEncodingInterceptor.class.getCanonicalName()); public DisableGZIPEncodingInterceptor() { } public void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException { logger.fine("Interceptor : " + this.getClass().getName() + ", Method : aroundWriteTo"); Object encoding = context.getHeaders().getFirst("Content-Encoding"); if (encoding != null && encoding.toString().equalsIgnoreCase("gzip")) { context.getHeaders().remove("Content-Encoding"); } context.proceed(); } } // ... rest of the code ...
99ad1572b7ed82443816c5aa8025e8a41332dae9
src/math.c
src/math.c
/*------------------------------------------------------------------------------ | NuCTex | math.c | Author | Benjamin A - Nullsrc | Created | 30 December, 2015 | Changed | 31 December, 2015 |------------------------------------------------------------------------------- | Overview | Implementation of various mathematical functions used in the game \-----------------------------------------------------------------------------*/ #include "math.h" #include <stdio.h> #include <stdlib.h> void initRand() { srand(time(NULL)); } int rng(int low, int high) { high += 1; int temp = low; int addend = rand() % (high - low); temp += addend; return temp; } int zrng(int range) { int temp = rand() % (range + 1); return temp; } int brng() { int temp = rand() % 2; return temp; } int calcDamage(int strength) { int damageTotal = 0; damageTotal = rng(strength - (strength/4), strength + (strength/5)); return damageTotal; } int runAway(int escapingAgility, int chasingAgility) { if(escapingAgility > chasingAgility) { return 1; } else { return 0; } }
/*------------------------------------------------------------------------------ | NuCTex | math.c | Author | Benjamin A - Nullsrc | Created | 30 December, 2015 | Changed | 31 December, 2015 |------------------------------------------------------------------------------- | Overview | Implementation of various mathematical functions used in the game \-----------------------------------------------------------------------------*/ #include "math.h" #include <stdio.h> #include <stdlib.h> void initRand() { srand(time(NULL)); } int rng(int low, int high) { high += 1; int temp = low; int addend = rand() % (high - low); temp += addend; return temp; } int zrng(int range) { int temp = rand() % (range + 1); return temp; } int brng() { int temp = rand() % 2; return temp; } int calcDamage(int strength) { int damageTotal = 0; damageTotal = rng(strength - (strength/4), strength + (strength/5)); return damageTotal; } int runAway(int escapingAgility, int chasingAgility) { if((escapingAgility + zrng(escapingAgility/3)) > chasingAgility) { return 1; } else { return 0; } }
Add RNG to running away.
Add RNG to running away.
C
mit
Nullsrc/nuctex,Nullsrc/nuctex
c
## Code Before: /*------------------------------------------------------------------------------ | NuCTex | math.c | Author | Benjamin A - Nullsrc | Created | 30 December, 2015 | Changed | 31 December, 2015 |------------------------------------------------------------------------------- | Overview | Implementation of various mathematical functions used in the game \-----------------------------------------------------------------------------*/ #include "math.h" #include <stdio.h> #include <stdlib.h> void initRand() { srand(time(NULL)); } int rng(int low, int high) { high += 1; int temp = low; int addend = rand() % (high - low); temp += addend; return temp; } int zrng(int range) { int temp = rand() % (range + 1); return temp; } int brng() { int temp = rand() % 2; return temp; } int calcDamage(int strength) { int damageTotal = 0; damageTotal = rng(strength - (strength/4), strength + (strength/5)); return damageTotal; } int runAway(int escapingAgility, int chasingAgility) { if(escapingAgility > chasingAgility) { return 1; } else { return 0; } } ## Instruction: Add RNG to running away. ## Code After: /*------------------------------------------------------------------------------ | NuCTex | math.c | Author | Benjamin A - Nullsrc | Created | 30 December, 2015 | Changed | 31 December, 2015 |------------------------------------------------------------------------------- | Overview | Implementation of various mathematical functions used in the game \-----------------------------------------------------------------------------*/ #include "math.h" #include <stdio.h> #include <stdlib.h> void initRand() { srand(time(NULL)); } int rng(int low, int high) { high += 1; int temp = low; int addend = rand() % (high - low); temp += addend; return temp; } int zrng(int range) { int temp = rand() % (range + 1); return temp; } int brng() { int temp = rand() % 2; return temp; } int calcDamage(int strength) { int damageTotal = 0; damageTotal = rng(strength - (strength/4), strength + (strength/5)); return damageTotal; } int runAway(int escapingAgility, int chasingAgility) { if((escapingAgility + zrng(escapingAgility/3)) > chasingAgility) { return 1; } else { return 0; } }
# ... existing code ... } int runAway(int escapingAgility, int chasingAgility) { if((escapingAgility + zrng(escapingAgility/3)) > chasingAgility) { return 1; } else { # ... rest of the code ...
899882be398f8a31e706a590c0a7e297c1589c25
threat_intel/util/error_messages.py
threat_intel/util/error_messages.py
import sys from traceback import extract_tb from traceback import format_list def write_exception(e): exc_type, __, exc_traceback = sys.exc_info() sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else '')) for line in format_list(extract_tb(exc_traceback)): sys.stderr.write(line) def write_error_message(message): sys.stderr.write('[ERROR] ') sys.stderr.write(message) sys.stderr.write('\n')
import sys from traceback import extract_tb from traceback import format_list def write_exception(e): exc_type, __, exc_traceback = sys.exc_info() sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e))) for line in format_list(extract_tb(exc_traceback)): sys.stderr.write(line) def write_error_message(message): sys.stderr.write('[ERROR] ') sys.stderr.write(message) sys.stderr.write('\n')
Fix deprecation warning interfering with tests
Fix deprecation warning interfering with tests
Python
mit
Yelp/threat_intel,megancarney/threat_intel,SYNchroACK/threat_intel
python
## Code Before: import sys from traceback import extract_tb from traceback import format_list def write_exception(e): exc_type, __, exc_traceback = sys.exc_info() sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else '')) for line in format_list(extract_tb(exc_traceback)): sys.stderr.write(line) def write_error_message(message): sys.stderr.write('[ERROR] ') sys.stderr.write(message) sys.stderr.write('\n') ## Instruction: Fix deprecation warning interfering with tests ## Code After: import sys from traceback import extract_tb from traceback import format_list def write_exception(e): exc_type, __, exc_traceback = sys.exc_info() sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e))) for line in format_list(extract_tb(exc_traceback)): sys.stderr.write(line) def write_error_message(message): sys.stderr.write('[ERROR] ') sys.stderr.write(message) sys.stderr.write('\n')
... def write_exception(e): exc_type, __, exc_traceback = sys.exc_info() sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e))) for line in format_list(extract_tb(exc_traceback)): sys.stderr.write(line) ...
662101e89943fe62b7036894140272e2f9ea4f78
ibmcnx/test/test.py
ibmcnx/test/test.py
import ibmcnx.test.loadFunction loadFilesService() FilesPolicyService.browse( "title", "true", 1, 25 )
import ibmcnx.test.loadFunction ibmcnx.test.loadFunction.loadFilesService() FilesPolicyService.browse( "title", "true", 1, 25 )
Customize scripts to work with menu
Customize scripts to work with menu
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
python
## Code Before: import ibmcnx.test.loadFunction loadFilesService() FilesPolicyService.browse( "title", "true", 1, 25 ) ## Instruction: Customize scripts to work with menu ## Code After: import ibmcnx.test.loadFunction ibmcnx.test.loadFunction.loadFilesService() FilesPolicyService.browse( "title", "true", 1, 25 )
# ... existing code ... import ibmcnx.test.loadFunction ibmcnx.test.loadFunction.loadFilesService() FilesPolicyService.browse( "title", "true", 1, 25 ) # ... rest of the code ...
89c1530882c67a135687df389d0a96d2283873c8
conda_smithy/tests/test_feedstock_io.py
conda_smithy/tests/test_feedstock_io.py
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
Add some tests for `get_repo`.
Add some tests for `get_repo`.
Python
bsd-3-clause
shadowwalkersb/conda-smithy,ocefpaf/conda-smithy,conda-forge/conda-smithy,ocefpaf/conda-smithy,conda-forge/conda-smithy,shadowwalkersb/conda-smithy
python
## Code Before: from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main() ## Instruction: Add some tests for `get_repo`. ## Code After: from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
# ... existing code ... import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): # ... modified code ... fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir # ... rest of the code ...
1abeec5c22f22065e377a1567d3677e56fbc1b2c
tests/version_test.py
tests/version_test.py
import collections import numbers import os from sqlalchemy import __version__ from sqlalchemy_imageattach.version import VERSION, VERSION_INFO def test_version_info(): assert isinstance(VERSION_INFO, collections.Sequence) assert len(VERSION_INFO) == 3 assert isinstance(VERSION_INFO[0], numbers.Integral) assert isinstance(VERSION_INFO[1], numbers.Integral) assert isinstance(VERSION_INFO[2], numbers.Integral) def test_sqlalchemy_version(): assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2]) assert __version__.split('.')[:2] == VERSION.split('.')[:2] def test_version(): assert isinstance(VERSION, str) assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO) def test_print(): with os.popen('python -m sqlalchemy_imageattach.version') as pipe: printed_version = pipe.read().strip() assert printed_version == VERSION
import collections import numbers import os from sqlalchemy import __version__ from sqlalchemy_imageattach.version import VERSION, VERSION_INFO def test_version_info(): assert isinstance(VERSION_INFO, collections.Sequence) assert len(VERSION_INFO) == 3 assert isinstance(VERSION_INFO[0], numbers.Integral) assert isinstance(VERSION_INFO[1], numbers.Integral) assert isinstance(VERSION_INFO[2], numbers.Integral) def test_sqlalchemy_version(): sqla_version_info = list(map(int, __version__.split('.')[:2])) assert sqla_version_info >= list(VERSION_INFO[:2]) assert __version__.split('.')[:2] >= VERSION.split('.')[:2] def test_version(): assert isinstance(VERSION, str) assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO) def test_print(): with os.popen('python -m sqlalchemy_imageattach.version') as pipe: printed_version = pipe.read().strip() assert printed_version == VERSION
Support any later versions of SQLAlchemy
Support any later versions of SQLAlchemy
Python
mit
youknowone/sqlalchemy-imageattach,dahlia/sqlalchemy-imageattach
python
## Code Before: import collections import numbers import os from sqlalchemy import __version__ from sqlalchemy_imageattach.version import VERSION, VERSION_INFO def test_version_info(): assert isinstance(VERSION_INFO, collections.Sequence) assert len(VERSION_INFO) == 3 assert isinstance(VERSION_INFO[0], numbers.Integral) assert isinstance(VERSION_INFO[1], numbers.Integral) assert isinstance(VERSION_INFO[2], numbers.Integral) def test_sqlalchemy_version(): assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2]) assert __version__.split('.')[:2] == VERSION.split('.')[:2] def test_version(): assert isinstance(VERSION, str) assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO) def test_print(): with os.popen('python -m sqlalchemy_imageattach.version') as pipe: printed_version = pipe.read().strip() assert printed_version == VERSION ## Instruction: Support any later versions of SQLAlchemy ## Code After: import collections import numbers import os from sqlalchemy import __version__ from sqlalchemy_imageattach.version import VERSION, VERSION_INFO def test_version_info(): assert isinstance(VERSION_INFO, collections.Sequence) assert len(VERSION_INFO) == 3 assert isinstance(VERSION_INFO[0], numbers.Integral) assert isinstance(VERSION_INFO[1], numbers.Integral) assert isinstance(VERSION_INFO[2], numbers.Integral) def test_sqlalchemy_version(): sqla_version_info = list(map(int, __version__.split('.')[:2])) assert sqla_version_info >= list(VERSION_INFO[:2]) assert __version__.split('.')[:2] >= VERSION.split('.')[:2] def test_version(): assert isinstance(VERSION, str) assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO) def test_print(): with os.popen('python -m sqlalchemy_imageattach.version') as pipe: printed_version = pipe.read().strip() assert printed_version == VERSION
... def test_sqlalchemy_version(): sqla_version_info = list(map(int, __version__.split('.')[:2])) assert sqla_version_info >= list(VERSION_INFO[:2]) assert __version__.split('.')[:2] >= VERSION.split('.')[:2] def test_version(): ...
8624bed7a3397a036dd3b32dc889d1aae99e24d4
src/main/java/me/rafaskb/ticketmaster/commands/CommandReload.java
src/main/java/me/rafaskb/ticketmaster/commands/CommandReload.java
package me.rafaskb.ticketmaster.commands; import org.bukkit.command.CommandSender; import me.rafaskb.ticketmaster.utils.Lang; import me.rafaskb.ticketmaster.utils.LangConfig; import me.rafaskb.ticketmaster.utils.Perm; public class CommandReload extends Command { public CommandReload() { super(Perm.RELOAD); } @Override protected void run(CommandSender sender, String[] args) { LangConfig.reloadConfig(); Lang.sendMessage(sender, Lang.RELOAD_MESSAGE); } }
package me.rafaskb.ticketmaster.commands; import me.rafaskb.ticketmaster.utils.ConfigLoader; import org.bukkit.command.CommandSender; import me.rafaskb.ticketmaster.utils.Lang; import me.rafaskb.ticketmaster.utils.LangConfig; import me.rafaskb.ticketmaster.utils.Perm; public class CommandReload extends Command { public CommandReload() { super(Perm.RELOAD); } @Override protected void run(CommandSender sender, String[] args) { LangConfig.reloadConfig(); ConfigLoader.reloadConfig(); Lang.sendMessage(sender, Lang.RELOAD_MESSAGE); } }
Fix config not reloading by reload command
Fix config not reloading by reload command
Java
apache-2.0
Gamealition/ticket-master,robrotheram/ticket-master
java
## Code Before: package me.rafaskb.ticketmaster.commands; import org.bukkit.command.CommandSender; import me.rafaskb.ticketmaster.utils.Lang; import me.rafaskb.ticketmaster.utils.LangConfig; import me.rafaskb.ticketmaster.utils.Perm; public class CommandReload extends Command { public CommandReload() { super(Perm.RELOAD); } @Override protected void run(CommandSender sender, String[] args) { LangConfig.reloadConfig(); Lang.sendMessage(sender, Lang.RELOAD_MESSAGE); } } ## Instruction: Fix config not reloading by reload command ## Code After: package me.rafaskb.ticketmaster.commands; import me.rafaskb.ticketmaster.utils.ConfigLoader; import org.bukkit.command.CommandSender; import me.rafaskb.ticketmaster.utils.Lang; import me.rafaskb.ticketmaster.utils.LangConfig; import me.rafaskb.ticketmaster.utils.Perm; public class CommandReload extends Command { public CommandReload() { super(Perm.RELOAD); } @Override protected void run(CommandSender sender, String[] args) { LangConfig.reloadConfig(); ConfigLoader.reloadConfig(); Lang.sendMessage(sender, Lang.RELOAD_MESSAGE); } }
... package me.rafaskb.ticketmaster.commands; import me.rafaskb.ticketmaster.utils.ConfigLoader; import org.bukkit.command.CommandSender; import me.rafaskb.ticketmaster.utils.Lang; ... @Override protected void run(CommandSender sender, String[] args) { LangConfig.reloadConfig(); ConfigLoader.reloadConfig(); Lang.sendMessage(sender, Lang.RELOAD_MESSAGE); } ...
5e9486e5c6ce931557c1463b381d7f6f03ac9acd
c/src/ta_data/ta_adddatasourceparam_priv.h
c/src/ta_data/ta_adddatasourceparam_priv.h
/* The following is a private copy of the user provided * parameters for a TA_AddDataSource call. * * Code is in 'ta_data_interface.c' */ typedef struct { TA_SourceId id; TA_SourceFlag flags; TA_Period period; TA_String *location; TA_String *info; TA_String *username; TA_String *password; TA_String *category; TA_String *country; TA_String *exchange; TA_String *type; TA_String *symbol; TA_String *name; } TA_AddDataSourceParamPriv; /* Function to alloc/free a TA_AddDataSourceParamPriv. */ TA_AddDataSourceParamPriv *TA_AddDataSourceParamPrivAlloc( const TA_AddDataSourceParam *param ); TA_RetCode TA_AddDataSourceParamPrivFree( TA_AddDataSourceParamPriv *toBeFreed ); #endif
/* The following is a private copy of the user provided * parameters for a TA_AddDataSource call. * * Code is in 'ta_data_interface.c' */ typedef struct { TA_SourceId id; TA_SourceFlag flags; TA_Period period; TA_String *location; TA_String *info; TA_String *username; TA_String *password; TA_String *category; TA_String *country; TA_String *exchange; TA_String *type; TA_String *symbol; TA_String *name; } TA_AddDataSourceParamPriv; #endif
Remove function prototype that are now static in ta_data_interface.c
Remove function prototype that are now static in ta_data_interface.c git-svn-id: 33305d871a58cfd02b407b81d5206d2a785211eb@618 159cb52c-178a-4f3c-8eb8-d0aff033d058
C
bsd-3-clause
shamanland/ta-lib,shamanland/ta-lib,shamanland/ta-lib,shamanland/ta-lib,shamanland/ta-lib,shamanland/ta-lib
c
## Code Before: /* The following is a private copy of the user provided * parameters for a TA_AddDataSource call. * * Code is in 'ta_data_interface.c' */ typedef struct { TA_SourceId id; TA_SourceFlag flags; TA_Period period; TA_String *location; TA_String *info; TA_String *username; TA_String *password; TA_String *category; TA_String *country; TA_String *exchange; TA_String *type; TA_String *symbol; TA_String *name; } TA_AddDataSourceParamPriv; /* Function to alloc/free a TA_AddDataSourceParamPriv. */ TA_AddDataSourceParamPriv *TA_AddDataSourceParamPrivAlloc( const TA_AddDataSourceParam *param ); TA_RetCode TA_AddDataSourceParamPrivFree( TA_AddDataSourceParamPriv *toBeFreed ); #endif ## Instruction: Remove function prototype that are now static in ta_data_interface.c git-svn-id: 33305d871a58cfd02b407b81d5206d2a785211eb@618 159cb52c-178a-4f3c-8eb8-d0aff033d058 ## Code After: /* The following is a private copy of the user provided * parameters for a TA_AddDataSource call. * * Code is in 'ta_data_interface.c' */ typedef struct { TA_SourceId id; TA_SourceFlag flags; TA_Period period; TA_String *location; TA_String *info; TA_String *username; TA_String *password; TA_String *category; TA_String *country; TA_String *exchange; TA_String *type; TA_String *symbol; TA_String *name; } TA_AddDataSourceParamPriv; #endif
# ... existing code ... TA_String *name; } TA_AddDataSourceParamPriv; #endif # ... rest of the code ...
cdc63148c00a38ebbfd74879da8d646427627d1f
rasterio/rio/main.py
rasterio/rio/main.py
from pkg_resources import iter_entry_points for entry_point in iter_entry_points('rasterio.rio_commands'): entry_point.load()
from pkg_resources import iter_entry_points from rasterio.rio.cli import cli for entry_point in iter_entry_points('rasterio.rio_commands'): entry_point.load()
Add back import of cli.
Add back import of cli.
Python
bsd-3-clause
youngpm/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,njwilson23/rasterio,kapadia/rasterio,brendan-ward/rasterio,perrygeo/rasterio,johanvdw/rasterio,perrygeo/rasterio,johanvdw/rasterio,johanvdw/rasterio,kapadia/rasterio,njwilson23/rasterio,youngpm/rasterio,youngpm/rasterio,perrygeo/rasterio,njwilson23/rasterio,clembou/rasterio,clembou/rasterio,kapadia/rasterio,clembou/rasterio
python
## Code Before: from pkg_resources import iter_entry_points for entry_point in iter_entry_points('rasterio.rio_commands'): entry_point.load() ## Instruction: Add back import of cli. ## Code After: from pkg_resources import iter_entry_points from rasterio.rio.cli import cli for entry_point in iter_entry_points('rasterio.rio_commands'): entry_point.load()
... from pkg_resources import iter_entry_points from rasterio.rio.cli import cli for entry_point in iter_entry_points('rasterio.rio_commands'): ...
7f4d233c48bcdcd327286f3c2ce4f3e2942e6c3c
data_test.py
data_test.py
import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c)
import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c) s = data.Session() for playlist in s.query(data.Playlist): playlist.fetch_playlist_videos(c)
Test getting playlist videos, too
Test getting playlist videos, too
Python
mit
drkitty/metatube,drkitty/metatube
python
## Code Before: import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c) ## Instruction: Test getting playlist videos, too ## Code After: import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c) s = data.Session() for playlist in s.query(data.Playlist): playlist.fetch_playlist_videos(c)
... del s me.fetch_normal_playlists(c) s = data.Session() for playlist in s.query(data.Playlist): playlist.fetch_playlist_videos(c) ...
bb897662f7f3fc17b32ffd06962fa5cb582fb6d7
easytz/middleware.py
easytz/middleware.py
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate()
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
Set the timezone right after it gets activated.
Set the timezone right after it gets activated.
Python
apache-2.0
jamesmfriedman/django-easytz
python
## Code Before: from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate() ## Instruction: Set the timezone right after it gets activated. ## Code After: from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
// ... existing code ... # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz // ... modified code ... tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: // ... rest of the code ...
51f0fbbe84d8b217de061254f883444c4bfdce3e
src/org/mozilla/mozstumbler/DateTimeUtils.java
src/org/mozilla/mozstumbler/DateTimeUtils.java
package org.mozilla.mozstumbler; import android.annotation.SuppressLint; import java.util.Date; import java.text.DateFormat; import java.text.SimpleDateFormat; final class DateTimeUtils { private static final DateFormat mISO8601Format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); static final long MILLISECONDS_PER_DAY = 86400000; // milliseconds/day private DateTimeUtils() { } @SuppressLint("SimpleDateFormat") static String formatDate(Date date) { return mISO8601Format.format(date); } static String formatTime(long time) { return formatDate(new Date(time)); } static String formatTimeForLocale(long time) { return DateFormat.getDateTimeInstance().format(time); } static String formatCurrentTime() { return formatTime(System.currentTimeMillis()); } }
package org.mozilla.mozstumbler; import android.annotation.SuppressLint; import java.util.Date; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.TimeZone; final class DateTimeUtils { private static final DateFormat mISO8601Format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); static final long MILLISECONDS_PER_DAY = 86400000; // milliseconds/day static { mISO8601Format.setTimeZone(TimeZone.getTimeZone("UTC")); } private DateTimeUtils() { } @SuppressLint("SimpleDateFormat") static String formatDate(Date date) { return mISO8601Format.format(date); } static String formatTime(long time) { return formatDate(new Date(time)); } static String formatTimeForLocale(long time) { return DateFormat.getDateTimeInstance().format(time); } static String formatCurrentTime() { return formatTime(System.currentTimeMillis()); } }
Fix timezone in submitted data
Fix timezone in submitted data SimpleDateFormat uses the system's default time zone by default.
Java
mpl-2.0
priyankvex/MozStumbler,cascheberg/MozStumbler,priyankvex/MozStumbler,garvankeeley/MozStumbler,dougt/MozStumbler,priyankvex/MozStumbler,MozillaCZ/MozStumbler,cascheberg/MozStumbler,hasadna/OpenTrainApp,MozillaCZ/MozStumbler,petercpg/MozStumbler,cascheberg/MozStumbler,crankycoder/MozStumbler,garvankeeley/MozStumbler,hasadna/OpenTrainApp,MozillaCZ/MozStumbler,crankycoder/MozStumbler,garvankeeley/MozStumbler,petercpg/MozStumbler,crankycoder/MozStumbler,petercpg/MozStumbler
java
## Code Before: package org.mozilla.mozstumbler; import android.annotation.SuppressLint; import java.util.Date; import java.text.DateFormat; import java.text.SimpleDateFormat; final class DateTimeUtils { private static final DateFormat mISO8601Format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); static final long MILLISECONDS_PER_DAY = 86400000; // milliseconds/day private DateTimeUtils() { } @SuppressLint("SimpleDateFormat") static String formatDate(Date date) { return mISO8601Format.format(date); } static String formatTime(long time) { return formatDate(new Date(time)); } static String formatTimeForLocale(long time) { return DateFormat.getDateTimeInstance().format(time); } static String formatCurrentTime() { return formatTime(System.currentTimeMillis()); } } ## Instruction: Fix timezone in submitted data SimpleDateFormat uses the system's default time zone by default. ## Code After: package org.mozilla.mozstumbler; import android.annotation.SuppressLint; import java.util.Date; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.TimeZone; final class DateTimeUtils { private static final DateFormat mISO8601Format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); static final long MILLISECONDS_PER_DAY = 86400000; // milliseconds/day static { mISO8601Format.setTimeZone(TimeZone.getTimeZone("UTC")); } private DateTimeUtils() { } @SuppressLint("SimpleDateFormat") static String formatDate(Date date) { return mISO8601Format.format(date); } static String formatTime(long time) { return formatDate(new Date(time)); } static String formatTimeForLocale(long time) { return DateFormat.getDateTimeInstance().format(time); } static String formatCurrentTime() { return formatTime(System.currentTimeMillis()); } }
// ... existing code ... import java.util.Date; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.TimeZone; final class DateTimeUtils { private static final DateFormat mISO8601Format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); static final long MILLISECONDS_PER_DAY = 86400000; // milliseconds/day static { mISO8601Format.setTimeZone(TimeZone.getTimeZone("UTC")); } private DateTimeUtils() { } // ... rest of the code ...
9be09ccf5749fae1d7a72663d592de5a88a755eb
archive/archive_api/src/responses.py
archive/archive_api/src/responses.py
import json from flask import Response, jsonify class ContextResponse(Response): """ This class adds the "@context" parameter to JSON responses before they're sent to the user. For an explanation of how this works/is used, read https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class """ context_url = "https://api.wellcomecollection.org/storage/v1/context.json" def __init__(self, response, **kwargs): # Here we unmarshal the response as provided by Flask-RESTPlus, add # the @context parameter, then repack it. rv = json.loads(response) # The @context may already be provided if we've been through the # force_type method below. if "@context" in rv: return super(ContextResponse, self).__init__(response, **kwargs) else: rv["@context"] = self.context_url return super(ContextResponse, self).__init__(json.dumps(rv), **kwargs) @classmethod def force_type(cls, rv, environ=None): # All of our endpoints should be returning a dictionary to be # serialised as JSON. assert isinstance(rv, dict) assert "@context" not in rv, rv rv["@context"] = cls.context_url return super(ContextResponse, cls).force_type(jsonify(rv), environ)
import json from flask import Response, jsonify from werkzeug.wsgi import ClosingIterator class ContextResponse(Response): """ This class adds the "@context" parameter to JSON responses before they're sent to the user. For an explanation of how this works/is used, read https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class """ context_url = "https://api.wellcomecollection.org/storage/v1/context.json" def __init__(self, response, *args, **kwargs): """ Unmarshal the response as provided by Flask-RESTPlus, add the @context parameter, then repack it. """ if isinstance(response, ClosingIterator): response = b''.join([char for char in response]) rv = json.loads(response) # The @context may already be provided if we've been through the # force_type method below. if "@context" in rv: return super(ContextResponse, self).__init__(response, **kwargs) else: rv["@context"] = self.context_url json_string = json.dumps(rv) return super(ContextResponse, self).__init__(json_string, **kwargs) @classmethod def force_type(cls, rv, environ=None): # All of our endpoints should be returning a dictionary to be # serialised as JSON. assert isinstance(rv, dict) assert "@context" not in rv, rv rv["@context"] = cls.context_url return super(ContextResponse, cls).force_type(jsonify(rv), environ)
Handle a Werkzeug ClosingIterator (as exposed by the tests)
Handle a Werkzeug ClosingIterator (as exposed by the tests)
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
python
## Code Before: import json from flask import Response, jsonify class ContextResponse(Response): """ This class adds the "@context" parameter to JSON responses before they're sent to the user. For an explanation of how this works/is used, read https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class """ context_url = "https://api.wellcomecollection.org/storage/v1/context.json" def __init__(self, response, **kwargs): # Here we unmarshal the response as provided by Flask-RESTPlus, add # the @context parameter, then repack it. rv = json.loads(response) # The @context may already be provided if we've been through the # force_type method below. if "@context" in rv: return super(ContextResponse, self).__init__(response, **kwargs) else: rv["@context"] = self.context_url return super(ContextResponse, self).__init__(json.dumps(rv), **kwargs) @classmethod def force_type(cls, rv, environ=None): # All of our endpoints should be returning a dictionary to be # serialised as JSON. assert isinstance(rv, dict) assert "@context" not in rv, rv rv["@context"] = cls.context_url return super(ContextResponse, cls).force_type(jsonify(rv), environ) ## Instruction: Handle a Werkzeug ClosingIterator (as exposed by the tests) ## Code After: import json from flask import Response, jsonify from werkzeug.wsgi import ClosingIterator class ContextResponse(Response): """ This class adds the "@context" parameter to JSON responses before they're sent to the user. For an explanation of how this works/is used, read https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class """ context_url = "https://api.wellcomecollection.org/storage/v1/context.json" def __init__(self, response, *args, **kwargs): """ Unmarshal the response as provided by Flask-RESTPlus, add the @context parameter, then repack it. """ if isinstance(response, ClosingIterator): response = b''.join([char for char in response]) rv = json.loads(response) # The @context may already be provided if we've been through the # force_type method below. if "@context" in rv: return super(ContextResponse, self).__init__(response, **kwargs) else: rv["@context"] = self.context_url json_string = json.dumps(rv) return super(ContextResponse, self).__init__(json_string, **kwargs) @classmethod def force_type(cls, rv, environ=None): # All of our endpoints should be returning a dictionary to be # serialised as JSON. assert isinstance(rv, dict) assert "@context" not in rv, rv rv["@context"] = cls.context_url return super(ContextResponse, cls).force_type(jsonify(rv), environ)
// ... existing code ... import json from flask import Response, jsonify from werkzeug.wsgi import ClosingIterator class ContextResponse(Response): // ... modified code ... """ context_url = "https://api.wellcomecollection.org/storage/v1/context.json" def __init__(self, response, *args, **kwargs): """ Unmarshal the response as provided by Flask-RESTPlus, add the @context parameter, then repack it. """ if isinstance(response, ClosingIterator): response = b''.join([char for char in response]) rv = json.loads(response) # The @context may already be provided if we've been through the ... return super(ContextResponse, self).__init__(response, **kwargs) else: rv["@context"] = self.context_url json_string = json.dumps(rv) return super(ContextResponse, self).__init__(json_string, **kwargs) @classmethod def force_type(cls, rv, environ=None): // ... rest of the code ...
435e27f3104cfe6e4f6577c2a5121ae2a6347eb1
tornado_aws/exceptions.py
tornado_aws/exceptions.py
class AWSClientException(Exception): """Base exception class for AWSClient :ivar msg: The error message """ fmt = 'An error occurred' def __init__(self, **kwargs): super(AWSClientException, self).__init__(self.fmt.format(**kwargs)) class ConfigNotFound(AWSClientException): """The configuration file could not be parsed. :ivar path: The path to the config file """ fmt = 'The config file could not be found ({path})' class ConfigParserError(AWSClientException): """Error raised when parsing a configuration file with :py:class`configparser.RawConfigParser` :ivar path: The path to the config file """ fmt = 'Unable to parse config file ({path})' class NoCredentialsError(AWSClientException): """Raised when the credentials could not be located.""" fmt = 'Credentials not found' class NoProfileError(AWSClientException): """Raised when the specified profile could not be located. :ivar path: The path to the config file :ivar profile: The profile that was specified """ fmt = 'Profile ({profile}) not found ({path})'
class AWSClientException(Exception): """Base exception class for AWSClient :ivar msg: The error message """ fmt = 'An error occurred' def __init__(self, **kwargs): super(AWSClientException, self).__init__(self.fmt.format(**kwargs)) class AWSError(AWSClientException): """Raised when the credentials could not be located.""" fmt = '{message}' class ConfigNotFound(AWSClientException): """The configuration file could not be parsed. :ivar path: The path to the config file """ fmt = 'The config file could not be found ({path})' class ConfigParserError(AWSClientException): """Error raised when parsing a configuration file with :py:class`configparser.RawConfigParser` :ivar path: The path to the config file """ fmt = 'Unable to parse config file ({path})' class NoCredentialsError(AWSClientException): """Raised when the credentials could not be located.""" fmt = 'Credentials not found' class NoProfileError(AWSClientException): """Raised when the specified profile could not be located. :ivar path: The path to the config file :ivar profile: The profile that was specified """ fmt = 'Profile ({profile}) not found ({path})'
Add a new generic AWS Error exception
Add a new generic AWS Error exception
Python
bsd-3-clause
gmr/tornado-aws,gmr/tornado-aws
python
## Code Before: class AWSClientException(Exception): """Base exception class for AWSClient :ivar msg: The error message """ fmt = 'An error occurred' def __init__(self, **kwargs): super(AWSClientException, self).__init__(self.fmt.format(**kwargs)) class ConfigNotFound(AWSClientException): """The configuration file could not be parsed. :ivar path: The path to the config file """ fmt = 'The config file could not be found ({path})' class ConfigParserError(AWSClientException): """Error raised when parsing a configuration file with :py:class`configparser.RawConfigParser` :ivar path: The path to the config file """ fmt = 'Unable to parse config file ({path})' class NoCredentialsError(AWSClientException): """Raised when the credentials could not be located.""" fmt = 'Credentials not found' class NoProfileError(AWSClientException): """Raised when the specified profile could not be located. :ivar path: The path to the config file :ivar profile: The profile that was specified """ fmt = 'Profile ({profile}) not found ({path})' ## Instruction: Add a new generic AWS Error exception ## Code After: class AWSClientException(Exception): """Base exception class for AWSClient :ivar msg: The error message """ fmt = 'An error occurred' def __init__(self, **kwargs): super(AWSClientException, self).__init__(self.fmt.format(**kwargs)) class AWSError(AWSClientException): """Raised when the credentials could not be located.""" fmt = '{message}' class ConfigNotFound(AWSClientException): """The configuration file could not be parsed. :ivar path: The path to the config file """ fmt = 'The config file could not be found ({path})' class ConfigParserError(AWSClientException): """Error raised when parsing a configuration file with :py:class`configparser.RawConfigParser` :ivar path: The path to the config file """ fmt = 'Unable to parse config file ({path})' class NoCredentialsError(AWSClientException): """Raised when the credentials could not be located.""" fmt = 'Credentials not found' class NoProfileError(AWSClientException): """Raised when the specified profile could not be located. :ivar path: The path to the config file :ivar profile: The profile that was specified """ fmt = 'Profile ({profile}) not found ({path})'
// ... existing code ... def __init__(self, **kwargs): super(AWSClientException, self).__init__(self.fmt.format(**kwargs)) class AWSError(AWSClientException): """Raised when the credentials could not be located.""" fmt = '{message}' class ConfigNotFound(AWSClientException): // ... rest of the code ...
647bfbff75f7356a974fdf3bc98612c12c47a151
angkot/geo/webapi/views.py
angkot/geo/webapi/views.py
from django.views.decorators.cache import cache_page from ..models import Province, City from angkot.common.decorators import wapi def _province_to_dict(province): data = dict(pid=province.id, name=province.name, code=province.code) return (province.id, data) def _city_to_dict(city): data = dict(cid=city.id, name=city.name, pid=city.province.id) return (city.id, data) @cache_page(60 * 60 * 24) @wapi.endpoint def province_list(req): provinces = Province.objects.filter(enabled=True) ordering = [province.id for province in provinces] provinces = dict(map(_province_to_dict, provinces)) last_update = Province.objects.filter(enabled=True) \ .order_by('-updated') \ .values_list('updated', flat=True)[0] return dict(provinces=provinces, ordering=ordering) @wapi.endpoint def city_list(req): limit = 500 try: page = int(req.GET.get('page', 0)) except ValueError: page = 0 start = page * limit end = start + limit query = City.objects.filter(enabled=True) \ .order_by('pk') cities = query[start:end] cities = dict(map(_city_to_dict, cities)) total = len(query) return dict(cities=cities, page=page, count=len(cities), total=total)
from django.views.decorators.cache import cache_page from ..models import Province, City from angkot.common.decorators import wapi def _province_to_dict(province): return dict(pid=province.id, name=province.name, code=province.code) def _city_to_dict(city): data = dict(cid=city.id, name=city.name, pid=city.province.id) return (city.id, data) @cache_page(60 * 60 * 24) @wapi.endpoint def province_list(req): provinces = Province.objects.filter(enabled=True) \ .order_by('order') provinces = list(map(_province_to_dict, provinces)) return dict(provinces=provinces) @wapi.endpoint def city_list(req): limit = 500 try: page = int(req.GET.get('page', 0)) except ValueError: page = 0 start = page * limit end = start + limit query = City.objects.filter(enabled=True) \ .order_by('pk') cities = query[start:end] cities = dict(map(_city_to_dict, cities)) total = len(query) return dict(cities=cities, page=page, count=len(cities), total=total)
Simplify the province list API
Simplify the province list API It only contains province data as a list without the separate ordering information. The order of the province data in the list is the order of provinces.
Python
agpl-3.0
shirone/angkot,angkot/angkot,shirone/angkot,angkot/angkot,angkot/angkot,shirone/angkot,shirone/angkot,angkot/angkot
python
## Code Before: from django.views.decorators.cache import cache_page from ..models import Province, City from angkot.common.decorators import wapi def _province_to_dict(province): data = dict(pid=province.id, name=province.name, code=province.code) return (province.id, data) def _city_to_dict(city): data = dict(cid=city.id, name=city.name, pid=city.province.id) return (city.id, data) @cache_page(60 * 60 * 24) @wapi.endpoint def province_list(req): provinces = Province.objects.filter(enabled=True) ordering = [province.id for province in provinces] provinces = dict(map(_province_to_dict, provinces)) last_update = Province.objects.filter(enabled=True) \ .order_by('-updated') \ .values_list('updated', flat=True)[0] return dict(provinces=provinces, ordering=ordering) @wapi.endpoint def city_list(req): limit = 500 try: page = int(req.GET.get('page', 0)) except ValueError: page = 0 start = page * limit end = start + limit query = City.objects.filter(enabled=True) \ .order_by('pk') cities = query[start:end] cities = dict(map(_city_to_dict, cities)) total = len(query) return dict(cities=cities, page=page, count=len(cities), total=total) ## Instruction: Simplify the province list API It only contains province data as a list without the separate ordering information. The order of the province data in the list is the order of provinces. ## Code After: from django.views.decorators.cache import cache_page from ..models import Province, City from angkot.common.decorators import wapi def _province_to_dict(province): return dict(pid=province.id, name=province.name, code=province.code) def _city_to_dict(city): data = dict(cid=city.id, name=city.name, pid=city.province.id) return (city.id, data) @cache_page(60 * 60 * 24) @wapi.endpoint def province_list(req): provinces = Province.objects.filter(enabled=True) \ .order_by('order') provinces = list(map(_province_to_dict, provinces)) return dict(provinces=provinces) @wapi.endpoint def city_list(req): limit = 500 try: page = int(req.GET.get('page', 0)) except ValueError: page = 0 start = page * limit end = start + limit query = City.objects.filter(enabled=True) \ .order_by('pk') cities = query[start:end] cities = dict(map(_city_to_dict, cities)) total = len(query) return dict(cities=cities, page=page, count=len(cities), total=total)
# ... existing code ... from angkot.common.decorators import wapi def _province_to_dict(province): return dict(pid=province.id, name=province.name, code=province.code) def _city_to_dict(city): data = dict(cid=city.id, # ... modified code ... @cache_page(60 * 60 * 24) @wapi.endpoint def province_list(req): provinces = Province.objects.filter(enabled=True) \ .order_by('order') provinces = list(map(_province_to_dict, provinces)) return dict(provinces=provinces) @wapi.endpoint def city_list(req): # ... rest of the code ...
7a448c4df3feb717d0b1d8abbf9d32237751aab5
nbgrader/tests/apps/test_nbgrader_extension.py
nbgrader/tests/apps/test_nbgrader_extension.py
import os import nbgrader def test_nbextension(): from nbgrader import _jupyter_nbextension_paths nbexts = _jupyter_nbextension_paths() assert len(nbexts) == 3 assert nbexts[0]['section'] == 'tree' assert nbexts[1]['section'] == 'notebook' assert nbexts[2]['section'] == 'tree' paths = [ext['src'] for ext in nbexts] for path in paths: assert os.path.isdir(os.path.join(os.path.dirname(nbgrader.__file__), path)) def test_serverextension(): from nbgrader import _jupyter_server_extension_paths serverexts = _jupyter_server_extension_paths() assert len(serverexts) == 2 assert serverexts[0]['module'] == 'nbgrader.server_extensions.assignment_list' assert serverexts[1]['module'] == 'nbgrader.server_extensions.formgrader'
import os import nbgrader def test_nbextension(): from nbgrader import _jupyter_nbextension_paths nbexts = _jupyter_nbextension_paths() assert len(nbexts) == 4 assert nbexts[0]['section'] == 'tree' assert nbexts[1]['section'] == 'notebook' assert nbexts[2]['section'] == 'tree' assert nbexts[3]['section'] == 'notebook' paths = [ext['src'] for ext in nbexts] for path in paths: assert os.path.isdir(os.path.join(os.path.dirname(nbgrader.__file__), path)) def test_serverextension(): from nbgrader import _jupyter_server_extension_paths serverexts = _jupyter_server_extension_paths() assert len(serverexts) == 3 assert serverexts[0]['module'] == 'nbgrader.server_extensions.assignment_list' assert serverexts[1]['module'] == 'nbgrader.server_extensions.formgrader' assert serverexts[2]['module'] == 'nbgrader.server_extensions.validate_assignment'
Fix tests for nbgrader extensions
Fix tests for nbgrader extensions
Python
bsd-3-clause
jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader
python
## Code Before: import os import nbgrader def test_nbextension(): from nbgrader import _jupyter_nbextension_paths nbexts = _jupyter_nbextension_paths() assert len(nbexts) == 3 assert nbexts[0]['section'] == 'tree' assert nbexts[1]['section'] == 'notebook' assert nbexts[2]['section'] == 'tree' paths = [ext['src'] for ext in nbexts] for path in paths: assert os.path.isdir(os.path.join(os.path.dirname(nbgrader.__file__), path)) def test_serverextension(): from nbgrader import _jupyter_server_extension_paths serverexts = _jupyter_server_extension_paths() assert len(serverexts) == 2 assert serverexts[0]['module'] == 'nbgrader.server_extensions.assignment_list' assert serverexts[1]['module'] == 'nbgrader.server_extensions.formgrader' ## Instruction: Fix tests for nbgrader extensions ## Code After: import os import nbgrader def test_nbextension(): from nbgrader import _jupyter_nbextension_paths nbexts = _jupyter_nbextension_paths() assert len(nbexts) == 4 assert nbexts[0]['section'] == 'tree' assert nbexts[1]['section'] == 'notebook' assert nbexts[2]['section'] == 'tree' assert nbexts[3]['section'] == 'notebook' paths = [ext['src'] for ext in nbexts] for path in paths: assert os.path.isdir(os.path.join(os.path.dirname(nbgrader.__file__), path)) def test_serverextension(): from nbgrader import _jupyter_server_extension_paths serverexts = _jupyter_server_extension_paths() assert len(serverexts) == 3 assert serverexts[0]['module'] == 'nbgrader.server_extensions.assignment_list' assert serverexts[1]['module'] == 'nbgrader.server_extensions.formgrader' assert serverexts[2]['module'] == 'nbgrader.server_extensions.validate_assignment'
// ... existing code ... def test_nbextension(): from nbgrader import _jupyter_nbextension_paths nbexts = _jupyter_nbextension_paths() assert len(nbexts) == 4 assert nbexts[0]['section'] == 'tree' assert nbexts[1]['section'] == 'notebook' assert nbexts[2]['section'] == 'tree' assert nbexts[3]['section'] == 'notebook' paths = [ext['src'] for ext in nbexts] for path in paths: assert os.path.isdir(os.path.join(os.path.dirname(nbgrader.__file__), path)) // ... modified code ... def test_serverextension(): from nbgrader import _jupyter_server_extension_paths serverexts = _jupyter_server_extension_paths() assert len(serverexts) == 3 assert serverexts[0]['module'] == 'nbgrader.server_extensions.assignment_list' assert serverexts[1]['module'] == 'nbgrader.server_extensions.formgrader' assert serverexts[2]['module'] == 'nbgrader.server_extensions.validate_assignment' // ... rest of the code ...
daaf58639148b220d6dcce13e054374a68f9b01a
testfixtures/tests/test_docs.py
testfixtures/tests/test_docs.py
import unittest from glob import glob from os.path import dirname,join,pardir from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS options = REPORT_NDIFF|ELLIPSIS def test_suite(): return unittest.TestSuite(( DocFileSuite( *glob(join(dirname(__file__),pardir,'docs','*.txt')), module_relative=False, optionflags=options ), ))
from glob import glob from manuel import doctest,codeblock from manuel.testing import TestSuite from os.path import dirname,join,pardir from doctest import REPORT_NDIFF,ELLIPSIS def test_suite(): m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) m += codeblock.Manuel() return TestSuite( m, *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) )
Use Manuel instead of doctest.
Use Manuel instead of doctest.
Python
mit
nebulans/testfixtures,Simplistix/testfixtures
python
## Code Before: import unittest from glob import glob from os.path import dirname,join,pardir from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS options = REPORT_NDIFF|ELLIPSIS def test_suite(): return unittest.TestSuite(( DocFileSuite( *glob(join(dirname(__file__),pardir,'docs','*.txt')), module_relative=False, optionflags=options ), )) ## Instruction: Use Manuel instead of doctest. ## Code After: from glob import glob from manuel import doctest,codeblock from manuel.testing import TestSuite from os.path import dirname,join,pardir from doctest import REPORT_NDIFF,ELLIPSIS def test_suite(): m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) m += codeblock.Manuel() return TestSuite( m, *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) )
// ... existing code ... from glob import glob from manuel import doctest,codeblock from manuel.testing import TestSuite from os.path import dirname,join,pardir from doctest import REPORT_NDIFF,ELLIPSIS def test_suite(): m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) m += codeblock.Manuel() return TestSuite( m, *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) ) // ... rest of the code ...
b20c1f0c5a71d46b80b405b6561869dfd52307c1
setup.py
setup.py
import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='botnet', version='0.1.0', author='boreq', description = ('IRC bot.'), long_description=read('README.md'), url='https://github.com/boreq/botnet/', license='BSD', packages=find_packages(), install_requires=[ 'blinker>=1.4', 'Click>=2.0', 'requests>=2.12', 'protobuf>=3.0', 'requests-oauthlib>=0.7.0', 'beautifulsoup4>=4.6.0', 'markov==0.0.0', ], entry_points=''' [console_scripts] botnet=botnet.cli:cli ''', dependency_links=[ "git+https://github.com/boreq/markov#egg=markov-0.0.0" ] )
import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='botnet', version='0.1.0', author='boreq', description = ('IRC bot.'), long_description=read('README.md'), url='https://github.com/boreq/botnet/', license='BSD', packages=find_packages(), install_requires=[ 'blinker>=1.4', 'Click>=2.0', 'requests>=2.12', 'protobuf>=3.0', 'requests-oauthlib>=0.7.0', 'beautifulsoup4>=4.6.0', 'markov @ git+https://github.com/boreq/markov#egg=markov-0.0.0', ], entry_points=''' [console_scripts] botnet=botnet.cli:cli ''' )
Fix pip developers believing that they know what is the best for everyone else
Fix pip developers believing that they know what is the best for everyone else
Python
mit
boreq/botnet
python
## Code Before: import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='botnet', version='0.1.0', author='boreq', description = ('IRC bot.'), long_description=read('README.md'), url='https://github.com/boreq/botnet/', license='BSD', packages=find_packages(), install_requires=[ 'blinker>=1.4', 'Click>=2.0', 'requests>=2.12', 'protobuf>=3.0', 'requests-oauthlib>=0.7.0', 'beautifulsoup4>=4.6.0', 'markov==0.0.0', ], entry_points=''' [console_scripts] botnet=botnet.cli:cli ''', dependency_links=[ "git+https://github.com/boreq/markov#egg=markov-0.0.0" ] ) ## Instruction: Fix pip developers beliving that they know what is the best for everyone else ## Code After: import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='botnet', version='0.1.0', author='boreq', description = ('IRC bot.'), long_description=read('README.md'), url='https://github.com/boreq/botnet/', license='BSD', packages=find_packages(), install_requires=[ 'blinker>=1.4', 'Click>=2.0', 'requests>=2.12', 'protobuf>=3.0', 'requests-oauthlib>=0.7.0', 'beautifulsoup4>=4.6.0', 'markov @ git+https://github.com/boreq/markov#egg=markov-0.0.0', ], entry_points=''' [console_scripts] botnet=botnet.cli:cli ''' )
# ... existing code ... 'protobuf>=3.0', 'requests-oauthlib>=0.7.0', 'beautifulsoup4>=4.6.0', 'markov @ git+https://github.com/boreq/markov#egg=markov-0.0.0', ], entry_points=''' [console_scripts] botnet=botnet.cli:cli ''' ) # ... rest of the code ...
cd4c37e0daa545f72109122591f4f18365ee3292
app/src/main/java/org/wikipedia/views/ImageDetailHorizontalView.kt
app/src/main/java/org/wikipedia/views/ImageDetailHorizontalView.kt
package org.wikipedia.views import android.content.Context import android.util.AttributeSet import android.view.View import android.widget.LinearLayout import kotlinx.android.synthetic.main.view_image_detail_horizontal.view.* import org.wikipedia.R import org.wikipedia.util.StringUtil class ImageDetailHorizontalView constructor(context: Context, attrs: AttributeSet? = null, defStyle: Int) : LinearLayout(context, attrs, defStyle) { init { View.inflate(context, R.layout.view_image_detail_horizontal, this) if (attrs != null) { val array = getContext().obtainStyledAttributes(attrs, R.styleable.ImageDetailHorizontalView, defStyle, 0) titleText!!.text = array.getString(R.styleable.ImageDetailHorizontalView_title) setDetailText(array.getString(R.styleable.ImageDetailHorizontalView_detail)) array.recycle() } orientation = HORIZONTAL } fun setDetailText(detail: String?) { if (!detail.isNullOrEmpty()) { detailText!!.text = StringUtil.removeHTMLTags(detail) } } }
package org.wikipedia.views import android.content.Context import android.util.AttributeSet import android.view.View import android.widget.LinearLayout import kotlinx.android.synthetic.main.view_image_detail_horizontal.view.* import org.wikipedia.R import org.wikipedia.util.StringUtil class ImageDetailHorizontalView constructor(context: Context, attrs: AttributeSet? = null) : LinearLayout(context, attrs) { init { View.inflate(context, R.layout.view_image_detail_horizontal, this) if (attrs != null) { val array = getContext().obtainStyledAttributes(attrs, R.styleable.ImageDetailHorizontalView, 0, 0) titleText!!.text = array.getString(R.styleable.ImageDetailHorizontalView_title) setDetailText(array.getString(R.styleable.ImageDetailHorizontalView_detail)) array.recycle() } orientation = HORIZONTAL } fun setDetailText(detail: String?) { if (!detail.isNullOrEmpty()) { detailText!!.text = StringUtil.removeHTMLTags(detail) } } }
Fix crash when inflating view
Fix crash when inflating view
Kotlin
apache-2.0
dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,wikimedia/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia
kotlin
## Code Before: package org.wikipedia.views import android.content.Context import android.util.AttributeSet import android.view.View import android.widget.LinearLayout import kotlinx.android.synthetic.main.view_image_detail_horizontal.view.* import org.wikipedia.R import org.wikipedia.util.StringUtil class ImageDetailHorizontalView constructor(context: Context, attrs: AttributeSet? = null, defStyle: Int) : LinearLayout(context, attrs, defStyle) { init { View.inflate(context, R.layout.view_image_detail_horizontal, this) if (attrs != null) { val array = getContext().obtainStyledAttributes(attrs, R.styleable.ImageDetailHorizontalView, defStyle, 0) titleText!!.text = array.getString(R.styleable.ImageDetailHorizontalView_title) setDetailText(array.getString(R.styleable.ImageDetailHorizontalView_detail)) array.recycle() } orientation = HORIZONTAL } fun setDetailText(detail: String?) { if (!detail.isNullOrEmpty()) { detailText!!.text = StringUtil.removeHTMLTags(detail) } } } ## Instruction: Fix crash when inflating view ## Code After: package org.wikipedia.views import android.content.Context import android.util.AttributeSet import android.view.View import android.widget.LinearLayout import kotlinx.android.synthetic.main.view_image_detail_horizontal.view.* import org.wikipedia.R import org.wikipedia.util.StringUtil class ImageDetailHorizontalView constructor(context: Context, attrs: AttributeSet? = null) : LinearLayout(context, attrs) { init { View.inflate(context, R.layout.view_image_detail_horizontal, this) if (attrs != null) { val array = getContext().obtainStyledAttributes(attrs, R.styleable.ImageDetailHorizontalView, 0, 0) titleText!!.text = array.getString(R.styleable.ImageDetailHorizontalView_title) setDetailText(array.getString(R.styleable.ImageDetailHorizontalView_detail)) array.recycle() } orientation = HORIZONTAL } fun setDetailText(detail: String?) { if (!detail.isNullOrEmpty()) { detailText!!.text = StringUtil.removeHTMLTags(detail) } } }
// ... existing code ... import org.wikipedia.R import org.wikipedia.util.StringUtil class ImageDetailHorizontalView constructor(context: Context, attrs: AttributeSet? = null) : LinearLayout(context, attrs) { init { View.inflate(context, R.layout.view_image_detail_horizontal, this) if (attrs != null) { val array = getContext().obtainStyledAttributes(attrs, R.styleable.ImageDetailHorizontalView, 0, 0) titleText!!.text = array.getString(R.styleable.ImageDetailHorizontalView_title) setDetailText(array.getString(R.styleable.ImageDetailHorizontalView_detail)) array.recycle() // ... rest of the code ...
04ad06ca111358f4510d2849d2b3475d55ee5f02
android/app/src/main/java/com/reactnativenavigation/events/EventBus.java
android/app/src/main/java/com/reactnativenavigation/events/EventBus.java
package com.reactnativenavigation.events; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.ListIterator; public enum EventBus { instance; private final List<WeakReference<Subscriber>> subscribers = new ArrayList<>(); public void register(Subscriber subscriber) { if (isSubscribed(subscriber)) return; subscribers.add(new WeakReference<>(subscriber)); } public void unregister(Subscriber subscriber) { ListIterator<WeakReference<Subscriber>> iterator = subscribers.listIterator(); while (iterator.hasNext()) { WeakReference<Subscriber> ref = iterator.next(); Subscriber registered = ref.get(); if (registered != null && registered == subscriber) { subscribers.remove(ref); } } } public void post(Event event) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null) { registered.onEvent(event); } } } public boolean isSubscribed(Subscriber subscriber) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null && registered.equals(subscriber)) { return true; } } return false; } }
package com.reactnativenavigation.events; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.ListIterator; public enum EventBus { instance; private final List<WeakReference<Subscriber>> subscribers = new ArrayList<>(); public void register(Subscriber subscriber) { if (isSubscribed(subscriber)) return; subscribers.add(new WeakReference<>(subscriber)); } public void unregister(Subscriber subscriber) { ListIterator<WeakReference<Subscriber>> iterator = subscribers.listIterator(); while (iterator.hasNext()) { WeakReference<Subscriber> ref = iterator.next(); Subscriber registered = ref.get(); if (registered != null && registered == subscriber) { iterator.remove(); } } } public void post(Event event) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null) { registered.onEvent(event); } } } public boolean isSubscribed(Subscriber subscriber) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null && registered.equals(subscriber)) { return true; } } return false; } }
Fix concurrent modification exception when switching root
Fix concurrent modification exception when switching root
Java
mit
brianjd/react-native-navigation,ceyhuno/react-native-navigation,3sidedcube/react-native-navigation,pqkluan/react-native-navigation,chicojasl/react-native-navigation,brianjd/react-native-navigation,lakhman/react-native-navigation,MediaMonksMobile/react-native-navigation,chicojasl/react-native-navigation,MediaMonksMobile/react-native-navigation,kristoff-it/react-native-navigation,MattDavies/react-native-navigation,iotize/react-native-navigation,Jpoliachik/react-native-navigation,inalist/react-native-navigation,chicojasl/react-native-navigation,guyca/react-native-navigation,MediaMonksMobile/react-native-navigation,shahen94/react-native-navigation,coteries/react-native-navigation,ceyhuno/react-native-navigation,ceyhuno/react-native-navigation,BrendonSled/react-native-navigation,uni-react/react-native-navigation,guangmingzizai/react-native-navigation,junedomingo/react-native-navigation,MattDavies/react-native-navigation,snapme/react-native-navigation,ceyhuno/react-native-navigation,wix/react-native-navigation,thanhzusu/react-native-navigation,guyca/react-native-navigation,pqkluan/react-native-navigation,wix/react-native-navigation,holmesal/react-native-navigation,shahen94/react-native-navigation,brianjd/react-native-navigation,iotize/react-native-navigation,junedomingo/react-native-navigation,okarakose/react-native-navigation,lakhman/react-native-navigation,kristoff-it/react-native-navigation,shahen94/react-native-navigation,lakhman/react-native-navigation,BrendonSled/react-native-navigation,eeynard/react-native-navigation,thanhzusu/react-native-navigation,3sidedcube/react-native-navigation,varungupta85/react-native-navigation,uni-react/react-native-navigation,holmesal/react-native-navigation,brianjd/react-native-navigation,brianjd/react-native-navigation,eeynard/react-native-navigation,chicojasl/react-native-navigation,eeynard/react-native-navigation,thanhzusu/react-native-navigation,snapme/react-native-navigation,guangmingzizai/react-native-navigation,yusufyildirim/react-native-navigation,iotize/react-native-navigation,Jpoliachik/react-native-navigation,uni-react/react-native-navigation,brianjd/react-native-navigation,Jpoliachik/react-native-navigation,iotize/react-native-navigation,okarakose/react-native-navigation,pqkluan/react-native-navigation,okarakose/react-native-navigation,3sidedcube/react-native-navigation,Ehesp/react-native-navigation,chicojasl/react-native-navigation,chicojasl/react-native-navigation,wix/react-native-navigation,guangmingzizai/react-native-navigation,Jpoliachik/react-native-navigation,luggit/react-native-navigation,junedomingo/react-native-navigation,Ehesp/react-native-navigation,junedomingo/react-native-navigation,holmesal/react-native-navigation,guyca/react-native-navigation,wix/react-native-navigation,lkj01010/react-native-navigation,snapme/react-native-navigation,coteries/react-native-navigation,wix/react-native-navigation,lakhman/react-native-navigation,InTeach/react-native-navigation,holmesal/react-native-navigation,lkj01010/react-native-navigation,yusufyildirim/react-native-navigation,InTeach/react-native-navigation,ceyhuno/react-native-navigation,guyca/react-native-navigation,InTeach/react-native-navigation,yusufyildirim/react-native-navigation,MattDavies/react-native-navigation,varungupta85/react-native-navigation,snapme/react-native-navigation,inalist/react-native-navigation,shahen94/react-native-navigation,inalist/react-native-navigation,luggit/react-native-navigation,varungupta85/react-native-navigation,BrendonSled/react-native-navigation,eeynard/react-na
tive-navigation,luggit/react-native-navigation,Ehesp/react-native-navigation,Jpoliachik/react-native-navigation,uni-react/react-native-navigation,3sidedcube/react-native-navigation,InTeach/react-native-navigation,coteries/react-native-navigation,inalist/react-native-navigation,kristoff-it/react-native-navigation,MediaMonksMobile/react-native-navigation,yusufyildirim/react-native-navigation,pqkluan/react-native-navigation,wix/react-native-navigation,lkj01010/react-native-navigation,guangmingzizai/react-native-navigation,luggit/react-native-navigation,BrendonSled/react-native-navigation,Ehesp/react-native-navigation,kristoff-it/react-native-navigation,thanhzusu/react-native-navigation,Jpoliachik/react-native-navigation,varungupta85/react-native-navigation,thanhzusu/react-native-navigation,coteries/react-native-navigation,ceyhuno/react-native-navigation,okarakose/react-native-navigation,MattDavies/react-native-navigation,thanhzusu/react-native-navigation
java
## Code Before: package com.reactnativenavigation.events; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.ListIterator; public enum EventBus { instance; private final List<WeakReference<Subscriber>> subscribers = new ArrayList<>(); public void register(Subscriber subscriber) { if (isSubscribed(subscriber)) return; subscribers.add(new WeakReference<>(subscriber)); } public void unregister(Subscriber subscriber) { ListIterator<WeakReference<Subscriber>> iterator = subscribers.listIterator(); while (iterator.hasNext()) { WeakReference<Subscriber> ref = iterator.next(); Subscriber registered = ref.get(); if (registered != null && registered == subscriber) { subscribers.remove(ref); } } } public void post(Event event) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null) { registered.onEvent(event); } } } public boolean isSubscribed(Subscriber subscriber) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null && registered.equals(subscriber)) { return true; } } return false; } } ## Instruction: Fix concurrent modification exception when switching root ## Code After: package com.reactnativenavigation.events; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.ListIterator; public enum EventBus { instance; private final List<WeakReference<Subscriber>> subscribers = new ArrayList<>(); public void register(Subscriber subscriber) { if (isSubscribed(subscriber)) return; subscribers.add(new WeakReference<>(subscriber)); } public void unregister(Subscriber subscriber) { ListIterator<WeakReference<Subscriber>> iterator = subscribers.listIterator(); while (iterator.hasNext()) { WeakReference<Subscriber> ref = iterator.next(); Subscriber registered = ref.get(); if (registered != null && registered == subscriber) { iterator.remove(); } } } public void post(Event event) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null) { registered.onEvent(event); } } } public boolean isSubscribed(Subscriber subscriber) { for (WeakReference<Subscriber> ref : subscribers) { Subscriber registered = ref.get(); if (registered != null && registered.equals(subscriber)) { return true; } } return false; } }
// ... existing code ... WeakReference<Subscriber> ref = iterator.next(); Subscriber registered = ref.get(); if (registered != null && registered == subscriber) { iterator.remove(); } } } // ... rest of the code ...
81768b4a3ae0afc71ab7e07f0d3c45eaf0d1b5a7
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
from ooop import OOOP import configdb O = OOOP(**configdb.ooop) imp_obj = O.GiscedataFacturacioImportacioLinia imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like','Ja existeix una factura')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")]) total = len(imp_del_ids) n = 0 for imp_del_id in imp_del_ids: try: imp_obj.unlink([imp_del_id]) n +=1 print "%d/%d" % (n,total) except Exception, e: print e
from ooop import OOOP import configdb O = OOOP(**configdb.ooop) imp_obj = O.GiscedataFacturacioImportacioLinia imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")]) #imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")]) total = len(imp_del_ids) n = 0 for imp_del_id in imp_del_ids: try: imp_obj.unlink([imp_del_id]) n +=1 print "%d/%d" % (n,total) except Exception, e: print e
Refactor to new F1 error's message
Refactor to new F1 error's message
Python
agpl-3.0
Som-Energia/invoice-janitor
python
## Code Before: from ooop import OOOP import configdb O = OOOP(**configdb.ooop) imp_obj = O.GiscedataFacturacioImportacioLinia imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like','Ja existeix una factura')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")]) total = len(imp_del_ids) n = 0 for imp_del_id in imp_del_ids: try: imp_obj.unlink([imp_del_id]) n +=1 print "%d/%d" % (n,total) except Exception, e: print e ## Instruction: Refactor to new F1 erro's message ## Code After: from ooop import OOOP import configdb O = OOOP(**configdb.ooop) imp_obj = O.GiscedataFacturacioImportacioLinia imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")]) #imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")]) total = len(imp_del_ids) n = 0 for imp_del_id in imp_del_ids: try: imp_obj.unlink([imp_del_id]) n +=1 print "%d/%d" % (n,total) except Exception, e: print e
... imp_obj = O.GiscedataFacturacioImportacioLinia imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")]) #imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")]) imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")]) ...
ca3366bfdec91797c0a5406a5ba8094c4d13a233
comics/feedback/forms.py
comics/feedback/forms.py
from bootstrap.forms import BootstrapForm from django import forms class FeedbackForm(BootstrapForm): message = forms.CharField(label="What's on your heart", help_text='Remember to sign with you mail address if you want a reply.', widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
from bootstrap.forms import BootstrapForm from django import forms class FeedbackForm(BootstrapForm): message = forms.CharField(label="What's on your heart", help_text='Sign with your email address if you want a reply.', widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
Fix typo in feedback form help text
Fix typo in feedback form help text
Python
agpl-3.0
datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics
python
## Code Before: from bootstrap.forms import BootstrapForm from django import forms class FeedbackForm(BootstrapForm): message = forms.CharField(label="What's on your heart", help_text='Remember to sign with you mail address if you want a reply.', widget=forms.Textarea(attrs={'rows': 5, 'cols': 100})) ## Instruction: Fix typo in feedback form help text ## Code After: from bootstrap.forms import BootstrapForm from django import forms class FeedbackForm(BootstrapForm): message = forms.CharField(label="What's on your heart", help_text='Sign with your email address if you want a reply.', widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
// ... existing code ... class FeedbackForm(BootstrapForm): message = forms.CharField(label="What's on your heart", help_text='Sign with your email address if you want a reply.', widget=forms.Textarea(attrs={'rows': 5, 'cols': 100})) // ... rest of the code ...
f908501860858311536a3fef03fda7a632ce5412
djohno/tests/test_utils.py
djohno/tests/test_utils.py
from django.core.exceptions import ValidationError from django.test import TestCase import djohno from djohno.utils import ( is_pretty_from_address, get_app_versions ) class DjohnoUtilTests(TestCase): def test_is_pretty_from_address_fails_on_bare_address(self): """ Ensure normal email addresses aren't parsed as being "pretty". """ self.assertFalse(is_pretty_from_address('[email protected]')) def test_is_pretty_from_succeeds_on_pretty_address(self): """ Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as being "pretty". """ self.assertTrue(is_pretty_from_address('Foo <[email protected]>')) def test_is_pretty_from_raises_validation_error_on_bad_input(self): """ Ensure invalid email addresses (e.g. "hello") raise ValidationError if given invalid inputs. """ with self.assertRaises(ValidationError): self.assertTrue(is_pretty_from_address('hello')) def test_get_installed_app_versions(self): versions = get_app_versions() self.assertEqual(versions['Djohno']['installed'], djohno.__version__) self.assertEqual(versions['Baz']['installed'], '0.4.2') self.assertEqual(versions['Moo']['installed'], '0.42')
from django.core.exceptions import ValidationError from django.test import TestCase import djohno from djohno.utils import ( is_pretty_from_address, get_app_versions ) class DjohnoUtilTests(TestCase): def test_is_pretty_from_address_fails_on_bare_address(self): """ Ensure normal email addresses aren't parsed as being "pretty". """ self.assertFalse(is_pretty_from_address('[email protected]')) def test_is_pretty_from_succeeds_on_pretty_address(self): """ Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as being "pretty". """ self.assertTrue(is_pretty_from_address('Foo <[email protected]>')) def test_is_pretty_from_raises_validation_error_on_bad_input(self): """ Ensure invalid email addresses (e.g. "hello") raise ValidationError if given invalid inputs. """ with self.assertRaises(ValidationError): self.assertTrue(is_pretty_from_address('hello')) def test_get_installed_app_versions(self): """ Ensure we can correctly get the version of a few simple apps (Baz and Moo are bundled in djohno.test, and set up in test_settings.py). """ versions = get_app_versions() self.assertEqual(versions['Djohno']['installed'], djohno.__version__) self.assertEqual(versions['Baz']['installed'], '0.4.2') self.assertEqual(versions['Moo']['installed'], '0.42')
Add a missing test description
Add a missing test description
Python
bsd-2-clause
dominicrodger/djohno,dominicrodger/djohno
python
## Code Before: from django.core.exceptions import ValidationError from django.test import TestCase import djohno from djohno.utils import ( is_pretty_from_address, get_app_versions ) class DjohnoUtilTests(TestCase): def test_is_pretty_from_address_fails_on_bare_address(self): """ Ensure normal email addresses aren't parsed as being "pretty". """ self.assertFalse(is_pretty_from_address('[email protected]')) def test_is_pretty_from_succeeds_on_pretty_address(self): """ Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as being "pretty". """ self.assertTrue(is_pretty_from_address('Foo <[email protected]>')) def test_is_pretty_from_raises_validation_error_on_bad_input(self): """ Ensure invalid email addresses (e.g. "hello") raise ValidationError if given invalid inputs. """ with self.assertRaises(ValidationError): self.assertTrue(is_pretty_from_address('hello')) def test_get_installed_app_versions(self): versions = get_app_versions() self.assertEqual(versions['Djohno']['installed'], djohno.__version__) self.assertEqual(versions['Baz']['installed'], '0.4.2') self.assertEqual(versions['Moo']['installed'], '0.42') ## Instruction: Add a missing test description ## Code After: from django.core.exceptions import ValidationError from django.test import TestCase import djohno from djohno.utils import ( is_pretty_from_address, get_app_versions ) class DjohnoUtilTests(TestCase): def test_is_pretty_from_address_fails_on_bare_address(self): """ Ensure normal email addresses aren't parsed as being "pretty". """ self.assertFalse(is_pretty_from_address('[email protected]')) def test_is_pretty_from_succeeds_on_pretty_address(self): """ Ensure pretty addresses (e.g. Foo <[email protected]>) are parsed as being "pretty". """ self.assertTrue(is_pretty_from_address('Foo <[email protected]>')) def test_is_pretty_from_raises_validation_error_on_bad_input(self): """ Ensure invalid email addresses (e.g. "hello") raise ValidationError if given invalid inputs. """ with self.assertRaises(ValidationError): self.assertTrue(is_pretty_from_address('hello')) def test_get_installed_app_versions(self): """ Ensure we can correctly get the version of a few simple apps (Baz and Moo are bundled in djohno.test, and set up in test_settings.py). """ versions = get_app_versions() self.assertEqual(versions['Djohno']['installed'], djohno.__version__) self.assertEqual(versions['Baz']['installed'], '0.4.2') self.assertEqual(versions['Moo']['installed'], '0.42')
... self.assertTrue(is_pretty_from_address('hello')) def test_get_installed_app_versions(self): """ Ensure we can correctly get the version of a few simple apps (Baz and Moo are bundled in djohno.test, and set up in test_settings.py). """ versions = get_app_versions() self.assertEqual(versions['Djohno']['installed'], djohno.__version__) self.assertEqual(versions['Baz']['installed'], '0.4.2') ...
8a39a9e9c41320b5a89bc6ddcac779ca4afa0522
src/main/java/com/github/blindpirate/gogradle/task/InstallBuildDependenciesTask.java
src/main/java/com/github/blindpirate/gogradle/task/InstallBuildDependenciesTask.java
package com.github.blindpirate.gogradle.task; import com.github.blindpirate.gogradle.build.BuildManager; import com.github.blindpirate.gogradle.core.dependency.GolangDependency; import com.github.blindpirate.gogradle.core.dependency.tree.DependencyTreeNode; import org.gradle.api.tasks.TaskAction; import javax.inject.Inject; import static com.github.blindpirate.gogradle.build.Configuration.BUILD; public class InstallBuildDependenciesTask extends AbstractGolangTask { @Inject private BuildManager buildManager; public InstallBuildDependenciesTask() { dependsOn(GolangTaskContainer.RESOLVE_BUILD_DEPENDENCIES_TASK_NAME); } @TaskAction public void installDependencies() { DependencyTreeNode rootNode = getTask(ResolveBuildDependenciesTask.class).getDependencyTree(); rootNode.flatten() .stream() .map(GolangDependency::resolve) .forEach((dependency) -> buildManager.installDependency(dependency, BUILD)); } }
package com.github.blindpirate.gogradle.task; import com.github.blindpirate.gogradle.build.BuildManager; import com.github.blindpirate.gogradle.core.dependency.ResolvedDependency; import com.github.blindpirate.gogradle.core.dependency.tree.DependencyTreeNode; import org.gradle.api.tasks.TaskAction; import javax.inject.Inject; import static com.github.blindpirate.gogradle.build.Configuration.BUILD; public class InstallBuildDependenciesTask extends AbstractGolangTask { @Inject private BuildManager buildManager; public InstallBuildDependenciesTask() { dependsOn(GolangTaskContainer.RESOLVE_BUILD_DEPENDENCIES_TASK_NAME); } @TaskAction public void installDependencies() { DependencyTreeNode rootNode = getTask(ResolveBuildDependenciesTask.class).getDependencyTree(); rootNode.flatten() .stream() .map(dependency -> (ResolvedDependency) dependency) .forEach((dependency) -> buildManager.installDependency(dependency, BUILD)); } }
Use type casting instead of resolve() since we know it is actually ResolvedDependency
Use type casting instead of resolve() since we know it is actually ResolvedDependency
Java
apache-2.0
gogradle/gogradle,gogradle/gogradle,gogradle/gogradle,blindpirate/gogradle,gogradle/gogradle,blindpirate/gogradle
java
## Code Before: package com.github.blindpirate.gogradle.task; import com.github.blindpirate.gogradle.build.BuildManager; import com.github.blindpirate.gogradle.core.dependency.GolangDependency; import com.github.blindpirate.gogradle.core.dependency.tree.DependencyTreeNode; import org.gradle.api.tasks.TaskAction; import javax.inject.Inject; import static com.github.blindpirate.gogradle.build.Configuration.BUILD; public class InstallBuildDependenciesTask extends AbstractGolangTask { @Inject private BuildManager buildManager; public InstallBuildDependenciesTask() { dependsOn(GolangTaskContainer.RESOLVE_BUILD_DEPENDENCIES_TASK_NAME); } @TaskAction public void installDependencies() { DependencyTreeNode rootNode = getTask(ResolveBuildDependenciesTask.class).getDependencyTree(); rootNode.flatten() .stream() .map(GolangDependency::resolve) .forEach((dependency) -> buildManager.installDependency(dependency, BUILD)); } } ## Instruction: Use type casting instead of resolve() since we know it is actually ResolvedDependency ## Code After: package com.github.blindpirate.gogradle.task; import com.github.blindpirate.gogradle.build.BuildManager; import com.github.blindpirate.gogradle.core.dependency.ResolvedDependency; import com.github.blindpirate.gogradle.core.dependency.tree.DependencyTreeNode; import org.gradle.api.tasks.TaskAction; import javax.inject.Inject; import static com.github.blindpirate.gogradle.build.Configuration.BUILD; public class InstallBuildDependenciesTask extends AbstractGolangTask { @Inject private BuildManager buildManager; public InstallBuildDependenciesTask() { dependsOn(GolangTaskContainer.RESOLVE_BUILD_DEPENDENCIES_TASK_NAME); } @TaskAction public void installDependencies() { DependencyTreeNode rootNode = getTask(ResolveBuildDependenciesTask.class).getDependencyTree(); rootNode.flatten() .stream() .map(dependency -> (ResolvedDependency) dependency) .forEach((dependency) -> buildManager.installDependency(dependency, BUILD)); } }
# ... existing code ... package com.github.blindpirate.gogradle.task; import com.github.blindpirate.gogradle.build.BuildManager; import com.github.blindpirate.gogradle.core.dependency.ResolvedDependency; import com.github.blindpirate.gogradle.core.dependency.tree.DependencyTreeNode; import org.gradle.api.tasks.TaskAction; # ... modified code ... DependencyTreeNode rootNode = getTask(ResolveBuildDependenciesTask.class).getDependencyTree(); rootNode.flatten() .stream() .map(dependency -> (ResolvedDependency) dependency) .forEach((dependency) -> buildManager.installDependency(dependency, BUILD)); } } # ... rest of the code ...
613261ba7349db4ec5ba0a35f3c1ae7441895f69
src/edu/ames/frc/robot/InputManager.java
src/edu/ames/frc/robot/InputManager.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.ames.frc.robot; /* List of buttons/toggles needed * Manual pivot toggle: 2 * Speed boost button: Active joystick push * Force shoot button: 4 * Force Realign button: 7 * Stop auto-target toggle: 10 * Activate frisbee grab button: 8 * Launch climb procedure: 9 (We need to make it nesscesary to hold, or double/triple tap the button so as to aviod accidentally starting the climb) * */ public class InputManager { }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.ames.frc.robot; /* List of buttons/toggles needed * Manual pivot toggle: 2 * Speed boost button: Active joystick push * Force shoot button: 4 * Force Realign button: 7 * Stop auto-target toggle: 10 * Activate frisbee grab button: 8 * Launch climb procedure: 9 (We need to make it nesscesary to hold, or double/triple tap the button so as to aviod accidentally starting the climb) * Test Gitttttt */ public class InputManager { }
Test of the Git system
Test of the Git system
Java
bsd-3-clause
amesrobotics/2013robot
java
## Code Before: /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.ames.frc.robot; /* List of buttons/toggles needed * Manual pivot toggle: 2 * Speed boost button: Active joystick push * Force shoot button: 4 * Force Realign button: 7 * Stop auto-target toggle: 10 * Activate frisbee grab button: 8 * Launch climb procedure: 9 (We need to make it nesscesary to hold, or double/triple tap the button so as to aviod accidentally starting the climb) * */ public class InputManager { } ## Instruction: Test of the Git system ## Code After: /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.ames.frc.robot; /* List of buttons/toggles needed * Manual pivot toggle: 2 * Speed boost button: Active joystick push * Force shoot button: 4 * Force Realign button: 7 * Stop auto-target toggle: 10 * Activate frisbee grab button: 8 * Launch climb procedure: 9 (We need to make it nesscesary to hold, or double/triple tap the button so as to aviod accidentally starting the climb) * Test Gitttttt */ public class InputManager { }
// ... existing code ... * Stop auto-target toggle: 10 * Activate frisbee grab button: 8 * Launch climb procedure: 9 (We need to make it nesscesary to hold, or double/triple tap the button so as to aviod accidentally starting the climb) * Test Gitttttt */ public class InputManager { // ... rest of the code ...
2673f1bac21e43a4cad9edb7352f89750d6d0144
tests/settings.py
tests/settings.py
from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage"
from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" ALLOW_ANONYMOUS_ACCESS = False
Enforce that tests run with anonymous access off.
Enforce that tests run with anonymous access off.
Python
bsd-2-clause
mozilla/moztrap,mccarrmb/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,mozilla/moztrap,shinglyu/moztrap,mccarrmb/moztrap,shinglyu/moztrap,shinglyu/moztrap,mccarrmb/moztrap,mozilla/moztrap,bobsilverberg/moztrap,mozilla/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,bobsilverberg/moztrap
python
## Code Before: from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" ## Instruction: Enforce that tests run with anonymous access off. ## Code After: from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" ALLOW_ANONYMOUS_ACCESS = False
// ... existing code ... from cc.settings.default import * DEFAULT_FILE_STORAGE = "tests.storage.MemoryStorage" ALLOW_ANONYMOUS_ACCESS = False // ... rest of the code ...
540c5f2969e75a0f461e9d46090cfe8d92c53b00
Simulator/plot.py
Simulator/plot.py
from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] return 'history_' + y + 'txt' def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] y = y + 'txt' i = len(y) - 1 while i>=0 : if y[i]=='\\' or y[i]=='/' : break i-=1 if i>=0 : return y[:i+1] + 'history_' + y[i+1:] else: return 'history_' + y def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
Remove history name error for absolute paths
Remove history name error for absolute paths
Python
mit
aayushkapadia/chemical_reaction_simulator
python
## Code Before: from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] return 'history_' + y + 'txt' def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList) ## Instruction: Remove history name error for absolute paths ## Code After: from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] y = y + 'txt' i = len(y) - 1 while i>=0 : if y[i]=='\\' or y[i]=='/' : break i-=1 if i>=0 : return y[:i+1] + 'history_' + y[i+1:] else: return 'history_' + y def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
// ... existing code ... import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] y = y + 'txt' i = len(y) - 1 while i>=0 : if y[i]=='\\' or y[i]=='/' : break i-=1 if i>=0 : return y[:i+1] + 'history_' + y[i+1:] else: return 'history_' + y def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) // ... rest of the code ...
4c85300c5458053ac08a393b00513c80baf28031
reqon/deprecated/__init__.py
reqon/deprecated/__init__.py
import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(query['$query'], reql) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql
import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(reql, query['$query']) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql
Fix arguments order of reqon.deprecated.build_terms().
Fix arguments order of reqon.deprecated.build_terms().
Python
mit
dmpayton/reqon
python
## Code Before: import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(query['$query'], reql) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql ## Instruction: Fix arguments order of reqon.deprecated.build_terms(). ## Code After: import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(reql, query['$query']) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql
// ... existing code ... reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(reql, query['$query']) def build_terms(reql, query): // ... rest of the code ...
2c652df7f7ec93ecad0eb23094f12c6acd86256c
python/hello.py
python/hello.py
(lambda _, __, ___, ____, _____, ______, _______, ________: getattr( __import__(True.__class__.__name__[_] + [].__class__.__name__[__]), ().__class__.__eq__.__class__.__name__[:__] + ().__iter__().__class__.__name__[_____:________] )( _, (lambda _, __, ___: _(_, __, ___))( lambda _, __, ___: chr(___ % __) + _(_, __, ___ // __) if ___ else (lambda: _).func_code.co_lnotab, _ << ________, (((_____ << ____) + _) << ((___ << _____) - ___)) + (((((___ << __) - _) << ___) + _) << ((_____ << ____) + (_ << _))) + (((_______ << __) - _) << (((((_ << ___) + _)) << ___) + (_ << _))) + (((_______ << ___) + _) << ((_ << ______) + _)) + (((_______ << ____) - _) << ((_______ << ___))) + (((_ << ____) - _) << ((((___ << __) + _) << __) - _)) - (_______ << ((((___ << __) - _) << __) + _)) + (_______ << (((((_ << ___) + _)) << __))) - ((((((_ << ___) + _)) << __) + _) << ((((___ << __) + _) << _))) + (((_______ << __) - _) << (((((_ << ___) + _)) << _))) + (((___ << ___) + _) << ((_____ << _))) + (_____ << ______) + (_ << ___) ) ) )( *(lambda _, __, ___: _(_, __, ___))( (lambda _, __, ___: [__(___[(lambda: _).func_code.co_nlocals])] + _(_, __, ___[(lambda _: _).func_code.co_nlocals:]) if ___ else [] ), lambda _: _.func_code.co_argcount, ( lambda _: _, lambda _, __: _, lambda _, __, ___: _, lambda _, __, ___, ____: _, lambda _, __, ___, ____, _____: _, lambda _, __, ___, ____, _____, ______: _, lambda _, __, ___, ____, _____, ______, _______: _, lambda _, __, ___, ____, _____, ______, _______, ________: _ ) ) )
print 'Hello, World!'
Fix that damn obfuscated python
Fix that damn obfuscated python
Python
mit
natemara/super-important-project,natemara/super-important-project
python
## Code Before: (lambda _, __, ___, ____, _____, ______, _______, ________: getattr( __import__(True.__class__.__name__[_] + [].__class__.__name__[__]), ().__class__.__eq__.__class__.__name__[:__] + ().__iter__().__class__.__name__[_____:________] )( _, (lambda _, __, ___: _(_, __, ___))( lambda _, __, ___: chr(___ % __) + _(_, __, ___ // __) if ___ else (lambda: _).func_code.co_lnotab, _ << ________, (((_____ << ____) + _) << ((___ << _____) - ___)) + (((((___ << __) - _) << ___) + _) << ((_____ << ____) + (_ << _))) + (((_______ << __) - _) << (((((_ << ___) + _)) << ___) + (_ << _))) + (((_______ << ___) + _) << ((_ << ______) + _)) + (((_______ << ____) - _) << ((_______ << ___))) + (((_ << ____) - _) << ((((___ << __) + _) << __) - _)) - (_______ << ((((___ << __) - _) << __) + _)) + (_______ << (((((_ << ___) + _)) << __))) - ((((((_ << ___) + _)) << __) + _) << ((((___ << __) + _) << _))) + (((_______ << __) - _) << (((((_ << ___) + _)) << _))) + (((___ << ___) + _) << ((_____ << _))) + (_____ << ______) + (_ << ___) ) ) )( *(lambda _, __, ___: _(_, __, ___))( (lambda _, __, ___: [__(___[(lambda: _).func_code.co_nlocals])] + _(_, __, ___[(lambda _: _).func_code.co_nlocals:]) if ___ else [] ), lambda _: _.func_code.co_argcount, ( lambda _: _, lambda _, __: _, lambda _, __, ___: _, lambda _, __, ___, ____: _, lambda _, __, ___, ____, _____: _, lambda _, __, ___, ____, _____, ______: _, lambda _, __, ___, ____, _____, ______, _______: _, lambda _, __, ___, ____, _____, ______, _______, ________: _ ) ) ) ## Instruction: Fix that damn obfuscated python ## Code After: print 'Hello, World!'
// ... existing code ... print 'Hello, World!' // ... rest of the code ...
1f6ba483902c59dc70d15ea1e33957ac6a874f01
freesound_datasets/local_settings.example.py
freesound_datasets/local_settings.example.py
FS_CLIENT_ID = 'FREESOUND_KEY' FS_CLIENT_SECRET = 'FREESOUND_SECRET' # Freesound keys for "login with" functionality # Get credentials at http://www.freesound.org/apiv2/apply # Set callback url to http://localhost:8000/social/complete/freesound/ SOCIAL_AUTH_FREESOUND_KEY = None SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET' # Google keys for "login with" functionality # Get credentials at https://console.developers.google.com # Set callback url to http://localhost:8000/social/complete/google-oauth2/ SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .) SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET' # Facebook keys for "login with" functionality # See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html # NOTE: might not work in localhost SOCIAL_AUTH_FACEBOOK_KEY = None SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET' # Github keys for "login with" functionality # Get credentials at https://github.com/settings/applications/new # Set callback url to http://localhost:8000/social/complete/github/ SOCIAL_AUTH_GITHUB_KEY = None SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET'
FS_CLIENT_ID = 'FREESOUND_KEY' FS_CLIENT_SECRET = 'FREESOUND_SECRET' # Freesound keys for "login with" functionality # Get credentials at http://www.freesound.org/apiv2/apply # Set callback url to http://localhost:8000/social/complete/freesound/ SOCIAL_AUTH_FREESOUND_KEY = None SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
Remove unused social auth keys
Remove unused social auth keys
Python
agpl-3.0
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
python
## Code Before: FS_CLIENT_ID = 'FREESOUND_KEY' FS_CLIENT_SECRET = 'FREESOUND_SECRET' # Freesound keys for "login with" functionality # Get credentials at http://www.freesound.org/apiv2/apply # Set callback url to http://localhost:8000/social/complete/freesound/ SOCIAL_AUTH_FREESOUND_KEY = None SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET' # Google keys for "login with" functionality # Get credentials at https://console.developers.google.com # Set callback url to http://localhost:8000/social/complete/google-oauth2/ SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = None # (remove the part starting with the dot .) SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'GOOGLE_SECRET' # Facebook keys for "login with" functionality # See instructions in https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html # NOTE: might not work in localhost SOCIAL_AUTH_FACEBOOK_KEY = None SOCIAL_AUTH_FACEBOOK_SECRET = 'FACEBOOK_SECRET' # Github keys for "login with" functionality # Get credentials at https://github.com/settings/applications/new # Set callback url to http://localhost:8000/social/complete/github/ SOCIAL_AUTH_GITHUB_KEY = None SOCIAL_AUTH_GITHUB_SECRET = 'GITHUB_SECRET' ## Instruction: Remove unused social auth keys ## Code After: FS_CLIENT_ID = 'FREESOUND_KEY' FS_CLIENT_SECRET = 'FREESOUND_SECRET' # Freesound keys for "login with" functionality # Get credentials at http://www.freesound.org/apiv2/apply # Set callback url to http://localhost:8000/social/complete/freesound/ SOCIAL_AUTH_FREESOUND_KEY = None SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET'
// ... existing code ... # Set callback url to http://localhost:8000/social/complete/freesound/ SOCIAL_AUTH_FREESOUND_KEY = None SOCIAL_AUTH_FREESOUND_SECRET = 'FREESOUND_SECRET' // ... rest of the code ...
2e1b189727616b4c93ad4244299530c738304428
httpobs/scanner/utils.py
httpobs/scanner/utils.py
import socket import tld def valid_hostname(hostname: str) -> bool: """ :param hostname: The hostname requested in the scan :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return False # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return False except: pass # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't if hostname in tld.get_tld_names(): return False # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return False except: return False # If we've made it this far, then everything is good to go! Woohoo! return True
import socket def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: pass # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return None except: return None # If we've made it this far, then everything is good to go! Woohoo! return hostname
Remove TLD check, allow for www
Remove TLD check, allow for www
Python
mpl-2.0
april/http-observatory,april/http-observatory,april/http-observatory,mozilla/http-observatory,mozilla/http-observatory,mozilla/http-observatory
python
## Code Before: import socket import tld def valid_hostname(hostname: str) -> bool: """ :param hostname: The hostname requested in the scan :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return False # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return False except: pass # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't if hostname in tld.get_tld_names(): return False # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return False except: return False # If we've made it this far, then everything is good to go! Woohoo! return True ## Instruction: Remove TLD check, allow for www ## Code After: import socket def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: pass # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return None except: return None # If we've made it this far, then everything is good to go! Woohoo! return hostname
// ... existing code ... import socket def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass // ... modified code ... # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: pass # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. ... hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return None except: return None # If we've made it this far, then everything is good to go! Woohoo! return hostname // ... rest of the code ...
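A short sketch of how a caller consumes the new return convention (the validated hostname on success, None otherwise), using only the function shown above:

from httpobs.scanner.utils import valid_hostname

hostname = valid_hostname('www.mozilla.org')
if hostname is None:
    raise ValueError('not a scannable hostname')
# on success the validated string itself is returned and can be handed to the scanner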
7cde5e713ace2b0a1d9cdef01ac912f3a53814cd
run_scripts/build_phylogenies.py
run_scripts/build_phylogenies.py
import sys import dendrogenous as dg import dendrogenous.settings import dendrogenous.utils import dendrogenous.core import multiprocessing def main(settings_file): settings = dg.settings.Settings(settings_file) input_seqs = dg.utils.parse_seqs(settings.input_seqs) seqs_needing_run = dg.utils.check_already_run(settings, input_seqs) processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run] for p in processes: p.start() for p in processes: p.join() def build_phylogeny(seq, settings): seq_job = dg.core.Dendrogenous(seq, settings) seq_job.build_named_phylogeny() if __name__=='__main__': if len(sys.argv) != 2: print("USAGE: build_phylogenies.py settings.json") sys.exit(1) main(sys.argv[1])
import sys import dendrogenous as dg import dendrogenous.settings import dendrogenous.utils import dendrogenous.core import joblib import pickle #multiprocessing def main(settings_file): settings = dg.settings.Settings(settings_file) input_seqs = dg.utils.parse_seqs(settings.input_seqs) seqs_needing_run = dg.utils.check_already_run(settings, input_seqs) r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\ (seq, settings_file) for seq in seqs_needing_run) def pool_process(seq, settings_file): """ A hacky and unecessary way to provide a pickle serealisable object for multiprocessing to pass off to workers - inefficiency in reinstantiating a settings class every time """ settings = dg.settings.Settings(settings_file) seq_job = dg.core.Dendrogenous(seq, settings) seq_job.build_named_phylogeny() if __name__=='__main__': if len(sys.argv) != 2: print("USAGE: build_phylogenies.py settings.json") sys.exit(1) main(sys.argv[1])
Change run script to use worker pool
Change run script to use worker pool
Python
bsd-3-clause
fmaguire/dendrogenous
python
## Code Before: import sys import dendrogenous as dg import dendrogenous.settings import dendrogenous.utils import dendrogenous.core import multiprocessing def main(settings_file): settings = dg.settings.Settings(settings_file) input_seqs = dg.utils.parse_seqs(settings.input_seqs) seqs_needing_run = dg.utils.check_already_run(settings, input_seqs) processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run] for p in processes: p.start() for p in processes: p.join() def build_phylogeny(seq, settings): seq_job = dg.core.Dendrogenous(seq, settings) seq_job.build_named_phylogeny() if __name__=='__main__': if len(sys.argv) != 2: print("USAGE: build_phylogenies.py settings.json") sys.exit(1) main(sys.argv[1]) ## Instruction: Change run script to use worker pool ## Code After: import sys import dendrogenous as dg import dendrogenous.settings import dendrogenous.utils import dendrogenous.core import joblib import pickle #multiprocessing def main(settings_file): settings = dg.settings.Settings(settings_file) input_seqs = dg.utils.parse_seqs(settings.input_seqs) seqs_needing_run = dg.utils.check_already_run(settings, input_seqs) r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\ (seq, settings_file) for seq in seqs_needing_run) def pool_process(seq, settings_file): """ A hacky and unecessary way to provide a pickle serealisable object for multiprocessing to pass off to workers - inefficiency in reinstantiating a settings class every time """ settings = dg.settings.Settings(settings_file) seq_job = dg.core.Dendrogenous(seq, settings) seq_job.build_named_phylogeny() if __name__=='__main__': if len(sys.argv) != 2: print("USAGE: build_phylogenies.py settings.json") sys.exit(1) main(sys.argv[1])
... import dendrogenous.settings import dendrogenous.utils import dendrogenous.core import joblib import pickle #multiprocessing def main(settings_file): settings = dg.settings.Settings(settings_file) input_seqs = dg.utils.parse_seqs(settings.input_seqs) seqs_needing_run = dg.utils.check_already_run(settings, input_seqs) r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\ (seq, settings_file) for seq in seqs_needing_run) def pool_process(seq, settings_file): """ A hacky and unecessary way to provide a pickle serealisable object for multiprocessing to pass off to workers - inefficiency in reinstantiating a settings class every time """ settings = dg.settings.Settings(settings_file) seq_job = dg.core.Dendrogenous(seq, settings) seq_job.build_named_phylogeny() if __name__=='__main__': if len(sys.argv) != 2: ...
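The Parallel/delayed idiom adopted above, illustrated on a toy function so the pattern is easy to see in isolation (n_jobs=4 here is arbitrary; the run script's 24 simply matches its target machine):

import joblib

def square(x):
    return x * x

# each delayed(...) call becomes one job dispatched to the worker pool,
# and results come back in submission order
results = joblib.Parallel(n_jobs=4, verbose=5)(
    joblib.delayed(square)(i) for i in range(10))
# results == [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]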
11e298829b132c9f192ff754845de178d736b8b2
include/System/plugin.h
include/System/plugin.h
/* $Id$ */ /* Copyright (c) 2008-2014 Pierre Pronchery <[email protected]> */ /* This file is part of DeforaOS System libSystem */ /* This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef LIBSYSTEM_SYSTEM_PLUGIN_H # define LIBSYSTEM_SYSTEM_PLUGIN_H # include "string.h" /* Plugin */ typedef void Plugin; /* functions */ Plugin * plugin_new(String const * libdir, String const * package, String const * type, String const * name); Plugin * plugin_new_self(void); void plugin_delete(Plugin * plugin); /* useful */ void * plugin_lookup(Plugin * plugin, String const * symbol); #endif /* !LIBSYSTEM_SYSTEM_PLUGIN_H */
/* $Id$ */ /* Copyright (c) 2008-2014 Pierre Pronchery <[email protected]> */ /* This file is part of DeforaOS System libSystem */ /* This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef LIBSYSTEM_SYSTEM_PLUGIN_H # define LIBSYSTEM_SYSTEM_PLUGIN_H # include "license.h" # include "string.h" /* Plugin */ typedef void Plugin; typedef struct _PluginHeader { char const * name; char const * icon; char const * description; LicenseFlags license; } PluginHeader; /* functions */ Plugin * plugin_new(String const * libdir, String const * package, String const * type, String const * name); Plugin * plugin_new_self(void); void plugin_delete(Plugin * plugin); /* useful */ void * plugin_lookup(Plugin * plugin, String const * symbol); #endif /* !LIBSYSTEM_SYSTEM_PLUGIN_H */
Define a standard header for plug-ins
Define a standard header for plug-ins
C
bsd-2-clause
DeforaOS/libSystem,DeforaOS/libSystem,DeforaOS/libSystem
c
## Code Before: /* $Id$ */ /* Copyright (c) 2008-2014 Pierre Pronchery <[email protected]> */ /* This file is part of DeforaOS System libSystem */ /* This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef LIBSYSTEM_SYSTEM_PLUGIN_H # define LIBSYSTEM_SYSTEM_PLUGIN_H # include "string.h" /* Plugin */ typedef void Plugin; /* functions */ Plugin * plugin_new(String const * libdir, String const * package, String const * type, String const * name); Plugin * plugin_new_self(void); void plugin_delete(Plugin * plugin); /* useful */ void * plugin_lookup(Plugin * plugin, String const * symbol); #endif /* !LIBSYSTEM_SYSTEM_PLUGIN_H */ ## Instruction: Define a standard header for plug-ins ## Code After: /* $Id$ */ /* Copyright (c) 2008-2014 Pierre Pronchery <[email protected]> */ /* This file is part of DeforaOS System libSystem */ /* This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef LIBSYSTEM_SYSTEM_PLUGIN_H # define LIBSYSTEM_SYSTEM_PLUGIN_H # include "license.h" # include "string.h" /* Plugin */ typedef void Plugin; typedef struct _PluginHeader { char const * name; char const * icon; char const * description; LicenseFlags license; } PluginHeader; /* functions */ Plugin * plugin_new(String const * libdir, String const * package, String const * type, String const * name); Plugin * plugin_new_self(void); void plugin_delete(Plugin * plugin); /* useful */ void * plugin_lookup(Plugin * plugin, String const * symbol); #endif /* !LIBSYSTEM_SYSTEM_PLUGIN_H */
# ... existing code ... #ifndef LIBSYSTEM_SYSTEM_PLUGIN_H # define LIBSYSTEM_SYSTEM_PLUGIN_H # include "license.h" # include "string.h" /* Plugin */ typedef void Plugin; typedef struct _PluginHeader { char const * name; char const * icon; char const * description; LicenseFlags license; } PluginHeader; /* functions */ # ... rest of the code ...
7c494c9216247d4480f2b293947b75947b1fcb01
readme_renderer/__main__.py
readme_renderer/__main__.py
from __future__ import absolute_import, print_function from readme_renderer.rst import render import sys if len(sys.argv) == 2: with open(sys.argv[1]) as fp: out = render(fp.read(), stream=sys.stderr) if out is not None: print(out) else: sys.exit(1) else: print("Syntax: python -m readme_renderer <file.rst>", file=sys.stderr)
from __future__ import absolute_import, print_function import argparse from readme_renderer.rst import render import sys if __name__ == '__main__': parser = argparse.ArgumentParser( description="Renders a .rst README to HTML", ) parser.add_argument('input', help="Input README file") parser.add_argument('-o', '--output', help="Output file (default: stdout)") args = parser.parse_args() if args.output: output_file = open(args.output, 'w') else: output_file = sys.stdout input_file = open(args.input) rendered = render(input_file.read(), stream=sys.stderr) if rendered is None: sys.exit(1) print(rendered, file=output_file)
Use `if __name__` and argparse
Use `if __name__` and argparse
Python
apache-2.0
pypa/readme_renderer,pypa/readme
python
## Code Before: from __future__ import absolute_import, print_function from readme_renderer.rst import render import sys if len(sys.argv) == 2: with open(sys.argv[1]) as fp: out = render(fp.read(), stream=sys.stderr) if out is not None: print(out) else: sys.exit(1) else: print("Syntax: python -m readme_renderer <file.rst>", file=sys.stderr) ## Instruction: Use `if __name__` and argparse ## Code After: from __future__ import absolute_import, print_function import argparse from readme_renderer.rst import render import sys if __name__ == '__main__': parser = argparse.ArgumentParser( description="Renders a .rst README to HTML", ) parser.add_argument('input', help="Input README file") parser.add_argument('-o', '--output', help="Output file (default: stdout)") args = parser.parse_args() if args.output: output_file = open(args.output, 'w') else: output_file = sys.stdout input_file = open(args.input) rendered = render(input_file.read(), stream=sys.stderr) if rendered is None: sys.exit(1) print(rendered, file=output_file)
... from __future__ import absolute_import, print_function import argparse from readme_renderer.rst import render import sys if __name__ == '__main__': parser = argparse.ArgumentParser( description="Renders a .rst README to HTML", ) parser.add_argument('input', help="Input README file") parser.add_argument('-o', '--output', help="Output file (default: stdout)") args = parser.parse_args() if args.output: output_file = open(args.output, 'w') else: output_file = sys.stdout input_file = open(args.input) rendered = render(input_file.read(), stream=sys.stderr) if rendered is None: sys.exit(1) print(rendered, file=output_file) ...
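Usage note: with the argparse entry point above, the renderer is invoked as python -m readme_renderer README.rst to print HTML on stdout, or python -m readme_renderer README.rst -o README.html to write it to a file.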
07ea0d8ec5c65f0fc94dc29f8b03402c571d3a42
qipipe/interfaces/fix_dicom.py
qipipe/interfaces/fix_dicom.py
import os from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits, InputMultiPath, File, Directory, TraitedSpec) from qipipe.staging.fix_dicom import fix_dicom_headers class FixDicomInputSpec(BaseInterfaceInputSpec): collection = traits.Str(desc='The image collection', mandatory=True) subject = traits.Str(desc='The subject name', mandatory=True) in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True) class FixDicomOutputSpec(TraitedSpec): out_files = traits.List(desc="The modified output files", trait=File, exists=True) class FixDicom(BaseInterface): """The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function.""" input_spec = FixDicomInputSpec output_spec = FixDicomOutputSpec def _run_interface(self, runtime): self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_files'] = self._out_files return outputs
import os from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits, InputMultiPath, File, Directory, TraitedSpec) from qipipe.staging.fix_dicom import fix_dicom_headers class FixDicomInputSpec(BaseInterfaceInputSpec): collection = traits.Str(desc='The image collection', mandatory=True) subject = traits.Str(desc='The subject name', mandatory=True) in_file = File(exists=True, desc='The input DICOM file', mandatory=True) class FixDicomOutputSpec(TraitedSpec): out_file = File(desc="The modified output file", exists=True) class FixDicom(BaseInterface): """The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function.""" input_spec = FixDicomInputSpec output_spec = FixDicomOutputSpec def _run_interface(self, runtime): self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_file'] = self._out_file return outputs
Fix only one file at a time.
Fix only one file at a time.
Python
bsd-2-clause
ohsu-qin/qipipe
python
## Code Before: import os from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits, InputMultiPath, File, Directory, TraitedSpec) from qipipe.staging.fix_dicom import fix_dicom_headers class FixDicomInputSpec(BaseInterfaceInputSpec): collection = traits.Str(desc='The image collection', mandatory=True) subject = traits.Str(desc='The subject name', mandatory=True) in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True) class FixDicomOutputSpec(TraitedSpec): out_files = traits.List(desc="The modified output files", trait=File, exists=True) class FixDicom(BaseInterface): """The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function.""" input_spec = FixDicomInputSpec output_spec = FixDicomOutputSpec def _run_interface(self, runtime): self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_files'] = self._out_files return outputs ## Instruction: Fix only one file at a time. ## Code After: import os from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits, InputMultiPath, File, Directory, TraitedSpec) from qipipe.staging.fix_dicom import fix_dicom_headers class FixDicomInputSpec(BaseInterfaceInputSpec): collection = traits.Str(desc='The image collection', mandatory=True) subject = traits.Str(desc='The subject name', mandatory=True) in_file = File(exists=True, desc='The input DICOM file', mandatory=True) class FixDicomOutputSpec(TraitedSpec): out_file = File(desc="The modified output file", exists=True) class FixDicom(BaseInterface): """The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function.""" input_spec = FixDicomInputSpec output_spec = FixDicomOutputSpec def _run_interface(self, runtime): self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_file'] = self._out_file return outputs
# ... existing code ... subject = traits.Str(desc='The subject name', mandatory=True) in_file = File(exists=True, desc='The input DICOM file', mandatory=True) class FixDicomOutputSpec(TraitedSpec): out_file = File(desc="The modified output file", exists=True) class FixDicom(BaseInterface): # ... modified code ... output_spec = FixDicomOutputSpec def _run_interface(self, runtime): self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_file'] = self._out_file return outputs # ... rest of the code ...
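Since FixDicom now handles exactly one DICOM file per run, iterating over a series is left to the calling pipeline; one plausible way to do that in nipype is a MapNode (a sketch only: the node name and input values are illustrative, not taken from qipipe):

from nipype.pipeline.engine import MapNode
from qipipe.interfaces.fix_dicom import FixDicom

dicom_files = ['scan_001.dcm', 'scan_002.dcm']   # placeholder paths
fix = MapNode(FixDicom(collection='MyCollection', subject='Subject01'),
              iterfield=['in_file'], name='fix_dicom')
fix.inputs.in_file = dicom_files                 # one FixDicom run per file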
1ddb8b17d3c9057501c026eaa9fa6753024514d2
src/main/java/com/ezardlabs/lostsector/missions/Mission.java
src/main/java/com/ezardlabs/lostsector/missions/Mission.java
package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.objects.enemies.Enemy; public abstract class Mission { protected int enemiesKilled = 0; private long missionStart = 0; private EnemyStatusListener enemyStatusListener; public void startMission() { missionStart = System.currentTimeMillis(); } public long getMissionTime() { return System.currentTimeMillis() - missionStart; } public abstract void load(); public final void notifyEnemySpawn(Enemy enemy) { if (enemyStatusListener != null) { enemyStatusListener.onEnemySpawned(enemy); } } public final void notifyEnemyDeath(Enemy enemy) { enemiesKilled++; if (enemyStatusListener != null) { enemyStatusListener.onEnemyKilled(enemy); } } void setEnemyStatusListener(EnemyStatusListener enemyStatusListener) { this.enemyStatusListener = enemyStatusListener; } protected interface EnemyStatusListener { void onEnemySpawned(Enemy enemy); void onEnemyKilled(Enemy enemy); } }
package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.objects.enemies.Enemy; public abstract class Mission { protected int enemiesKilled = 0; private long missionStart = 0; private boolean completed = false; private EnemyStatusListener enemyStatusListener; private MissionStatusListener missionStatusListener; public void startMission() { missionStart = System.currentTimeMillis(); } public long getMissionTime() { return System.currentTimeMillis() - missionStart; } public abstract void load(); protected final void completedMission() { completed = true; if (missionStatusListener != null) { missionStatusListener.onMissionCompleted(); } } public final void notifyEnemySpawn(Enemy enemy) { if (enemyStatusListener != null) { enemyStatusListener.onEnemySpawned(enemy); } } public final void notifyEnemyDeath(Enemy enemy) { enemiesKilled++; if (enemyStatusListener != null) { enemyStatusListener.onEnemyKilled(enemy); } } void setEnemyStatusListener(EnemyStatusListener enemyStatusListener) { this.enemyStatusListener = enemyStatusListener; } void setMissionStatusListener(MissionStatusListener listener) { this.missionStatusListener = listener; } protected interface EnemyStatusListener { void onEnemySpawned(Enemy enemy); void onEnemyKilled(Enemy enemy); } protected interface MissionStatusListener { void onMissionCompleted(); } }
Add listener for mission status
Add listener for mission status
Java
mit
8-Bit-Warframe/Lost-Sector
java
## Code Before: package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.objects.enemies.Enemy; public abstract class Mission { protected int enemiesKilled = 0; private long missionStart = 0; private EnemyStatusListener enemyStatusListener; public void startMission() { missionStart = System.currentTimeMillis(); } public long getMissionTime() { return System.currentTimeMillis() - missionStart; } public abstract void load(); public final void notifyEnemySpawn(Enemy enemy) { if (enemyStatusListener != null) { enemyStatusListener.onEnemySpawned(enemy); } } public final void notifyEnemyDeath(Enemy enemy) { enemiesKilled++; if (enemyStatusListener != null) { enemyStatusListener.onEnemyKilled(enemy); } } void setEnemyStatusListener(EnemyStatusListener enemyStatusListener) { this.enemyStatusListener = enemyStatusListener; } protected interface EnemyStatusListener { void onEnemySpawned(Enemy enemy); void onEnemyKilled(Enemy enemy); } } ## Instruction: Add listener for mission status ## Code After: package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.objects.enemies.Enemy; public abstract class Mission { protected int enemiesKilled = 0; private long missionStart = 0; private boolean completed = false; private EnemyStatusListener enemyStatusListener; private MissionStatusListener missionStatusListener; public void startMission() { missionStart = System.currentTimeMillis(); } public long getMissionTime() { return System.currentTimeMillis() - missionStart; } public abstract void load(); protected final void completedMission() { completed = true; if (missionStatusListener != null) { missionStatusListener.onMissionCompleted(); } } public final void notifyEnemySpawn(Enemy enemy) { if (enemyStatusListener != null) { enemyStatusListener.onEnemySpawned(enemy); } } public final void notifyEnemyDeath(Enemy enemy) { enemiesKilled++; if (enemyStatusListener != null) { enemyStatusListener.onEnemyKilled(enemy); } } void setEnemyStatusListener(EnemyStatusListener enemyStatusListener) { this.enemyStatusListener = enemyStatusListener; } void setMissionStatusListener(MissionStatusListener listener) { this.missionStatusListener = listener; } protected interface EnemyStatusListener { void onEnemySpawned(Enemy enemy); void onEnemyKilled(Enemy enemy); } protected interface MissionStatusListener { void onMissionCompleted(); } }
# ... existing code ... public abstract class Mission { protected int enemiesKilled = 0; private long missionStart = 0; private boolean completed = false; private EnemyStatusListener enemyStatusListener; private MissionStatusListener missionStatusListener; public void startMission() { missionStart = System.currentTimeMillis(); # ... modified code ... } public abstract void load(); protected final void completedMission() { completed = true; if (missionStatusListener != null) { missionStatusListener.onMissionCompleted(); } } public final void notifyEnemySpawn(Enemy enemy) { if (enemyStatusListener != null) { ... this.enemyStatusListener = enemyStatusListener; } void setMissionStatusListener(MissionStatusListener listener) { this.missionStatusListener = listener; } protected interface EnemyStatusListener { void onEnemySpawned(Enemy enemy); void onEnemyKilled(Enemy enemy); } protected interface MissionStatusListener { void onMissionCompleted(); } } # ... rest of the code ...
c5609fe1b48cdd5740215c1d0783eaafdfe2e76b
listen/__init__.py
listen/__init__.py
from __future__ import print_function # This API requires Python 2.7 or more recent import sys if sys.version < "2.7.0": print("listen requires Python 2.7 or more recent") sys.exit(1) from listen.signal_handler import SignalHandler __all__ = ["listen"] __version__ = "0.1.0"
# Prepare for deprication of versions < 2.7 #from __future__ import print_function # This API requires Python 2.7 or more recent #import sys #if sys.version < "2.7.0": # print("listen requires Python 2.7 or more recent") # sys.exit(1) from listen.signal_handler import SignalHandler __all__ = ["listen"] __version__ = "0.1.1"
Remove requirement on python > 2.7
Remove requirement on python > 2.7
Python
mit
antevens/listen,antevens/listen
python
## Code Before: from __future__ import print_function # This API requires Python 2.7 or more recent import sys if sys.version < "2.7.0": print("listen requires Python 2.7 or more recent") sys.exit(1) from listen.signal_handler import SignalHandler __all__ = ["listen"] __version__ = "0.1.0" ## Instruction: Remove requirement on python > 2.7 ## Code After: # Prepare for deprication of versions < 2.7 #from __future__ import print_function # This API requires Python 2.7 or more recent #import sys #if sys.version < "2.7.0": # print("listen requires Python 2.7 or more recent") # sys.exit(1) from listen.signal_handler import SignalHandler __all__ = ["listen"] __version__ = "0.1.1"
... # Prepare for deprication of versions < 2.7 #from __future__ import print_function # This API requires Python 2.7 or more recent #import sys #if sys.version < "2.7.0": # print("listen requires Python 2.7 or more recent") # sys.exit(1) from listen.signal_handler import SignalHandler __all__ = ["listen"] __version__ = "0.1.1" ...
a4f78a09e6535ec90cede7e0d8295e8fd0f89734
commons/src/main/java/com/adaptc/mws/plugins/Suite.java
commons/src/main/java/com/adaptc/mws/plugins/Suite.java
package com.adaptc.mws.plugins; /** * This enum represents which suite or context Moab Web Services is running in - * HPC or Cloud. * @author bsaville */ public enum Suite { HPC("HPC"), CLOUD("Cloud"); /** * The default suite for MWS. This is equivalent to {@link #CLOUD}. */ public static final Suite DEFAULT_SUITE = Suite.CLOUD; private String str; private Suite(String str) { this.str = str; } /** * Returns the suite in a human-readable string, such as "Cloud" for {@link #CLOUD} and "HPC" for * {@link #HPC}. * @return A human-readable string */ public String toString() { return str; } /** * Returns {@link #DEFAULT_SUITE} by default if none is found matching. This is a * case insensitive match. Spaces and underscores are also equivalent for parsing. * @param suite The suite as a string * @return The corresponding suite value */ public static Suite parseString(String suite) { for(Suite val : values()) { if (val.toString().equalsIgnoreCase(suite) || val.name().equalsIgnoreCase(suite)) return val; } return DEFAULT_SUITE; } /** * A helper method to make sure that a true Suite reference may be used. * @param suite An actual suite value * @return The suite parameter */ public static Suite parseString(Suite suite) { return suite; } }
package com.adaptc.mws.plugins; /** * This enumeration represents which suite or context Moab Web Services is running in. * @author bsaville */ public enum Suite { HPC("HPC"), CLOUD("Cloud"); /** * The default suite for MWS. This is equivalent to {@link #CLOUD}. */ public static final Suite DEFAULT_SUITE = Suite.CLOUD; private String str; private Suite(String str) { this.str = str; } /** * Returns the suite in a human-readable string, such as "Cloud" for {@link #CLOUD} and "HPC" for * {@link #HPC}. * @return A human-readable string */ public String toString() { return str; } /** * Returns {@link #DEFAULT_SUITE} by default if none is found matching. This is a * case insensitive match. Spaces and underscores are also equivalent for parsing. * @param suite The suite as a string * @return The corresponding suite value */ public static Suite parseString(String suite) { for(Suite val : values()) { if (val.toString().equalsIgnoreCase(suite) || val.name().equalsIgnoreCase(suite)) return val; } return DEFAULT_SUITE; } /** * A helper method to make sure that a true Suite reference may be used. * @param suite An actual suite value * @return The suite parameter */ public static Suite parseString(Suite suite) { return suite; } }
Change comment on suite a bit
Change comment on suite a bit
Java
apache-2.0
adaptivecomputing/plugins-commons
java
## Code Before: package com.adaptc.mws.plugins; /** * This enum represents which suite or context Moab Web Services is running in - * HPC or Cloud. * @author bsaville */ public enum Suite { HPC("HPC"), CLOUD("Cloud"); /** * The default suite for MWS. This is equivalent to {@link #CLOUD}. */ public static final Suite DEFAULT_SUITE = Suite.CLOUD; private String str; private Suite(String str) { this.str = str; } /** * Returns the suite in a human-readable string, such as "Cloud" for {@link #CLOUD} and "HPC" for * {@link #HPC}. * @return A human-readable string */ public String toString() { return str; } /** * Returns {@link #DEFAULT_SUITE} by default if none is found matching. This is a * case insensitive match. Spaces and underscores are also equivalent for parsing. * @param suite The suite as a string * @return The corresponding suite value */ public static Suite parseString(String suite) { for(Suite val : values()) { if (val.toString().equalsIgnoreCase(suite) || val.name().equalsIgnoreCase(suite)) return val; } return DEFAULT_SUITE; } /** * A helper method to make sure that a true Suite reference may be used. * @param suite An actual suite value * @return The suite parameter */ public static Suite parseString(Suite suite) { return suite; } } ## Instruction: Change comment on suite a bit ## Code After: package com.adaptc.mws.plugins; /** * This enumeration represents which suite or context Moab Web Services is running in. * @author bsaville */ public enum Suite { HPC("HPC"), CLOUD("Cloud"); /** * The default suite for MWS. This is equivalent to {@link #CLOUD}. */ public static final Suite DEFAULT_SUITE = Suite.CLOUD; private String str; private Suite(String str) { this.str = str; } /** * Returns the suite in a human-readable string, such as "Cloud" for {@link #CLOUD} and "HPC" for * {@link #HPC}. * @return A human-readable string */ public String toString() { return str; } /** * Returns {@link #DEFAULT_SUITE} by default if none is found matching. This is a * case insensitive match. Spaces and underscores are also equivalent for parsing. * @param suite The suite as a string * @return The corresponding suite value */ public static Suite parseString(String suite) { for(Suite val : values()) { if (val.toString().equalsIgnoreCase(suite) || val.name().equalsIgnoreCase(suite)) return val; } return DEFAULT_SUITE; } /** * A helper method to make sure that a true Suite reference may be used. * @param suite An actual suite value * @return The suite parameter */ public static Suite parseString(Suite suite) { return suite; } }
... package com.adaptc.mws.plugins; /** * This enumeration represents which suite or context Moab Web Services is running in. * @author bsaville */ public enum Suite { ...
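Usage note: with the parseString() above, Suite.parseString("hpc") and Suite.parseString("HPC") both resolve to Suite.HPC, since the match is case-insensitive against both toString() and name(), while any unrecognized value falls back to DEFAULT_SUITE, i.e. Suite.CLOUD.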
c2b84152f437e86cc2b55c1cc0bbb35d3fa645b5
example_no_runner.c
example_no_runner.c
TEST standalone_pass(void) { PASS(); } /* Add all the definitions that need to be in the test runner's main file. */ GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { (void)argc; (void)argv; /* Initialize greatest, but don't build the CLI test runner code. */ GREATEST_INIT(); RUN_TEST(standalone_pass); /* Print report, but do not exit. */ printf("\nStandard report, as printed by greatest:\n"); GREATEST_PRINT_REPORT(); struct greatest_report_t report; greatest_get_report(&report); printf("\nCustom report:\n"); printf("pass %u, fail %u, skip %u, assertions %u\n", report.passed, report.failed, report.skipped, report.assertions); if (report.failed > 0) { return 1; } return 0; }
TEST standalone_pass(void) { PASS(); } /* Add all the definitions that need to be in the test runner's main file. */ GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { struct greatest_report_t report; (void)argc; (void)argv; /* Initialize greatest, but don't build the CLI test runner code. */ GREATEST_INIT(); RUN_TEST(standalone_pass); /* Print report, but do not exit. */ printf("\nStandard report, as printed by greatest:\n"); GREATEST_PRINT_REPORT(); greatest_get_report(&report); printf("\nCustom report:\n"); printf("pass %u, fail %u, skip %u, assertions %u\n", report.passed, report.failed, report.skipped, report.assertions); if (report.failed > 0) { return 1; } return 0; }
Fix warning for mixing declarations and code in ISO C90.
Fix warning for mixing declarations and code in ISO C90.
C
isc
silentbicycle/greatest,silentbicycle/greatest
c
## Code Before: TEST standalone_pass(void) { PASS(); } /* Add all the definitions that need to be in the test runner's main file. */ GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { (void)argc; (void)argv; /* Initialize greatest, but don't build the CLI test runner code. */ GREATEST_INIT(); RUN_TEST(standalone_pass); /* Print report, but do not exit. */ printf("\nStandard report, as printed by greatest:\n"); GREATEST_PRINT_REPORT(); struct greatest_report_t report; greatest_get_report(&report); printf("\nCustom report:\n"); printf("pass %u, fail %u, skip %u, assertions %u\n", report.passed, report.failed, report.skipped, report.assertions); if (report.failed > 0) { return 1; } return 0; } ## Instruction: Fix warning for mixing declarations and code in ISO C90. ## Code After: TEST standalone_pass(void) { PASS(); } /* Add all the definitions that need to be in the test runner's main file. */ GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { struct greatest_report_t report; (void)argc; (void)argv; /* Initialize greatest, but don't build the CLI test runner code. */ GREATEST_INIT(); RUN_TEST(standalone_pass); /* Print report, but do not exit. */ printf("\nStandard report, as printed by greatest:\n"); GREATEST_PRINT_REPORT(); greatest_get_report(&report); printf("\nCustom report:\n"); printf("pass %u, fail %u, skip %u, assertions %u\n", report.passed, report.failed, report.skipped, report.assertions); if (report.failed > 0) { return 1; } return 0; }
... GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { struct greatest_report_t report; (void)argc; (void)argv; ... printf("\nStandard report, as printed by greatest:\n"); GREATEST_PRINT_REPORT(); greatest_get_report(&report); printf("\nCustom report:\n"); ...
25cd8afdfede8a522f8d0f08ee4678a2e9c46a4b
curious/commands/__init__.py
curious/commands/__init__.py
import functools from curious.commands.command import Command def command(*args, **kwargs): """ A decorator to mark a function as a command. This will put a `factory` attribute on the function, which can later be called to create the Command instance. All arguments are passed to the Command class. """ def __inner(func): factory = functools.partial(Command, func, *args, **kwargs) func.factory = factory return func return __inner def event(func): """ Marks a function as an event. :param func: Either the function, or the name to give to the event. """ if isinstance(func, str): def __innr(f): f.event = func return f return __innr else: func.event = func.__name__[3:] return func
import functools from curious.commands.command import Command def command(*args, klass: type=Command, **kwargs): """ A decorator to mark a function as a command. This will put a `factory` attribute on the function, which can later be called to create the Command instance. All arguments are passed to the Command class. :param klass: The command class type to wrap the object in. """ def __inner(func): factory = functools.partial(klass, func, *args, **kwargs) func.factory = factory return func return __inner def event(func): """ Marks a function as an event. :param func: Either the function, or the name to give to the event. """ if isinstance(func, str): def __innr(f): f.event = func return f return __innr else: func.event = func.__name__[3:] return func
Allow changing what object is returned from Command instances.
Allow changing what object is returned from Command instances.
Python
mit
SunDwarf/curious
python
## Code Before: import functools from curious.commands.command import Command def command(*args, **kwargs): """ A decorator to mark a function as a command. This will put a `factory` attribute on the function, which can later be called to create the Command instance. All arguments are passed to the Command class. """ def __inner(func): factory = functools.partial(Command, func, *args, **kwargs) func.factory = factory return func return __inner def event(func): """ Marks a function as an event. :param func: Either the function, or the name to give to the event. """ if isinstance(func, str): def __innr(f): f.event = func return f return __innr else: func.event = func.__name__[3:] return func ## Instruction: Allow changing what object is returned from Command instances. ## Code After: import functools from curious.commands.command import Command def command(*args, klass: type=Command, **kwargs): """ A decorator to mark a function as a command. This will put a `factory` attribute on the function, which can later be called to create the Command instance. All arguments are passed to the Command class. :param klass: The command class type to wrap the object in. """ def __inner(func): factory = functools.partial(klass, func, *args, **kwargs) func.factory = factory return func return __inner def event(func): """ Marks a function as an event. :param func: Either the function, or the name to give to the event. """ if isinstance(func, str): def __innr(f): f.event = func return f return __innr else: func.event = func.__name__[3:] return func
# ... existing code ... from curious.commands.command import Command def command(*args, klass: type=Command, **kwargs): """ A decorator to mark a function as a command. This will put a `factory` attribute on the function, which can later be called to create the Command instance. All arguments are passed to the Command class. :param klass: The command class type to wrap the object in. """ def __inner(func): factory = functools.partial(klass, func, *args, **kwargs) func.factory = factory return func # ... rest of the code ...
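A sketch of what the new klass hook allows; OwnerCommand and the async handler signature are illustrative, only the klass= keyword itself comes from the decorator above:

from curious.commands import command
from curious.commands.command import Command

class OwnerCommand(Command):
    """Hypothetical Command subclass with extra checks."""

@command(klass=OwnerCommand)   # the factory now builds OwnerCommand(func) instead of Command(func)
async def shutdown(self, ctx):
    ...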
afbe8ddff1791084aa1bcad775f1b01481b72c2b
larvae/person.py
larvae/person.py
from larvae.base import LarvaeBase class Person(LarvaeBase): """ Details for a Person in Popolo format. """ _schema_name = "person" __slots__ = ('name', '_id', 'gender', 'birth_date', 'death_date', 'image', 'summary', 'biography', 'links', 'other_names', 'extras', 'contact_details', 'openstates_id', 'chamber', 'district') _other_name_slots = ('name', 'start_date', 'end_date', 'note') def __init__(self, name, **kwargs): super(Person, self).__init__() self.name = name for k, v in kwargs.items(): setattr(self, k, v) self.links = [] self.other_names = [] self.extras = {} def add_name(self, name, **kwargs): other_name = {'name': name} for k, v in kwargs.items(): if k not in self._other_name_slots: raise AttributeError('{0} not a valid kwarg for add_name' .format(k)) other_name[k] = v self.other_names.append(other_name) def add_link(self, url, note): self.links.append({"note": note, "url": url}) def __unicode__(self): return self.name __str__ = __unicode__
from larvae.base import LarvaeBase class Person(LarvaeBase): """ Details for a Person in Popolo format. """ _schema_name = "person" __slots__ = ('name', '_id', 'gender', 'birth_date', 'death_date', 'image', 'summary', 'biography', 'links', 'other_names', 'extras', 'contact_details', 'openstates_id', 'chamber', 'district') _other_name_slots = ('name', 'start_date', 'end_date', 'note') def __init__(self, name, **kwargs): super(Person, self).__init__() self.name = name self.links = [] self.other_names = [] self.extras = {} for k, v in kwargs.items(): setattr(self, k, v) def add_name(self, name, **kwargs): other_name = {'name': name} for k, v in kwargs.items(): if k not in self._other_name_slots: raise AttributeError('{0} not a valid kwarg for add_name' .format(k)) other_name[k] = v self.other_names.append(other_name) def add_link(self, url, note): self.links.append({"note": note, "url": url}) def __unicode__(self): return self.name __str__ = __unicode__
Move default value assignments before kwargs
Move default value assignments before kwargs
Python
bsd-3-clause
AGarrow/larvae
python
## Code Before: from larvae.base import LarvaeBase class Person(LarvaeBase): """ Details for a Person in Popolo format. """ _schema_name = "person" __slots__ = ('name', '_id', 'gender', 'birth_date', 'death_date', 'image', 'summary', 'biography', 'links', 'other_names', 'extras', 'contact_details', 'openstates_id', 'chamber', 'district') _other_name_slots = ('name', 'start_date', 'end_date', 'note') def __init__(self, name, **kwargs): super(Person, self).__init__() self.name = name for k, v in kwargs.items(): setattr(self, k, v) self.links = [] self.other_names = [] self.extras = {} def add_name(self, name, **kwargs): other_name = {'name': name} for k, v in kwargs.items(): if k not in self._other_name_slots: raise AttributeError('{0} not a valid kwarg for add_name' .format(k)) other_name[k] = v self.other_names.append(other_name) def add_link(self, url, note): self.links.append({"note": note, "url": url}) def __unicode__(self): return self.name __str__ = __unicode__ ## Instruction: Move default value assignments before kwargs ## Code After: from larvae.base import LarvaeBase class Person(LarvaeBase): """ Details for a Person in Popolo format. """ _schema_name = "person" __slots__ = ('name', '_id', 'gender', 'birth_date', 'death_date', 'image', 'summary', 'biography', 'links', 'other_names', 'extras', 'contact_details', 'openstates_id', 'chamber', 'district') _other_name_slots = ('name', 'start_date', 'end_date', 'note') def __init__(self, name, **kwargs): super(Person, self).__init__() self.name = name self.links = [] self.other_names = [] self.extras = {} for k, v in kwargs.items(): setattr(self, k, v) def add_name(self, name, **kwargs): other_name = {'name': name} for k, v in kwargs.items(): if k not in self._other_name_slots: raise AttributeError('{0} not a valid kwarg for add_name' .format(k)) other_name[k] = v self.other_names.append(other_name) def add_link(self, url, note): self.links.append({"note": note, "url": url}) def __unicode__(self): return self.name __str__ = __unicode__
// ... existing code ... def __init__(self, name, **kwargs): super(Person, self).__init__() self.name = name self.links = [] self.other_names = [] self.extras = {} for k, v in kwargs.items(): setattr(self, k, v) def add_name(self, name, **kwargs): other_name = {'name': name} // ... rest of the code ...
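The reordering matters because the default assignments previously ran after the kwargs loop and silently clobbered caller-supplied values; roughly:

from larvae.person import Person

p = Person('Jane Doe', links=[{'note': 'homepage', 'url': 'http://example.com'}])
# old order: p.links == []   (the default [] overwrote the keyword argument)
# new order: p.links == [{'note': 'homepage', 'url': 'http://example.com'}]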
ed09ed41e2b9486f55f801eee47f08e2a9679b6c
tests/sequence/test_alignment.py
tests/sequence/test_alignment.py
import unittest from unittest import mock from io import StringIO from cref.sequence.alignment import Blast class AlignmentTestCase(unittest.TestCase): def test_blast_local(self): blast = Blast('data/blastdb/pdbseqres') results = blast.align('AASSF') pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs) def test_blast_local_error(self): blast = Blast('db') with self.assertRaises(Exception) as cm: blast.align('AASSF') self.assertIn('Database error', cm.exception.args[-1]) def test_blast_web(self): blast = Blast() with mock.patch('cref.sequence.alignment.NCBIWWW.qblast') as qblast: with open('tests/samples/web_blast.xml') as qblast_results: qblast.return_value = StringIO(qblast_results.read()) results = blast.align('AASSF') self.assertIn('1o61', str(results)) self.assertEqual(len(results), 493) pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs)
import unittest from unittest import mock from io import StringIO from cref.sequence.alignment import Blast class AlignmentTestCase(unittest.TestCase): def test_blast_local(self): blast = Blast('data/blastdb/pdbseqres') results = blast.align('AASSF') pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs) def test_blast_local_error(self): blast = Blast('db') with self.assertRaises(Exception) as cm: blast.align('AASSF') self.assertIn('Database error', cm.exception.args[-1])
Fix broken test after blast web removal
Fix broken test after blast web removal
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
python
## Code Before: import unittest from unittest import mock from io import StringIO from cref.sequence.alignment import Blast class AlignmentTestCase(unittest.TestCase): def test_blast_local(self): blast = Blast('data/blastdb/pdbseqres') results = blast.align('AASSF') pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs) def test_blast_local_error(self): blast = Blast('db') with self.assertRaises(Exception) as cm: blast.align('AASSF') self.assertIn('Database error', cm.exception.args[-1]) def test_blast_web(self): blast = Blast() with mock.patch('cref.sequence.alignment.NCBIWWW.qblast') as qblast: with open('tests/samples/web_blast.xml') as qblast_results: qblast.return_value = StringIO(qblast_results.read()) results = blast.align('AASSF') self.assertIn('1o61', str(results)) self.assertEqual(len(results), 493) pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs) ## Instruction: Fix broken test after blast web removal ## Code After: import unittest from unittest import mock from io import StringIO from cref.sequence.alignment import Blast class AlignmentTestCase(unittest.TestCase): def test_blast_local(self): blast = Blast('data/blastdb/pdbseqres') results = blast.align('AASSF') pdbs = {result.pdb_code for result in results} self.assertIn('1o61', pdbs) def test_blast_local_error(self): blast = Blast('db') with self.assertRaises(Exception) as cm: blast.align('AASSF') self.assertIn('Database error', cm.exception.args[-1])
# ... existing code ... with self.assertRaises(Exception) as cm: blast.align('AASSF') self.assertIn('Database error', cm.exception.args[-1]) # ... rest of the code ...
3afdb5e467b4e31e2de98786c2b84467061fe181
lib/java_impl/talkhouse/Segmenter.java
lib/java_impl/talkhouse/Segmenter.java
package talkhouse; public class Segmenter { private double[] buf=null; int winsz; int winshift; static final int MIN_SEGMENTS = 3; public Segmenter(int winsz, int winshift) { this.winsz = winsz; this.winshift = winshift; } public double[][] apply(double[] data) { double[] combo; if (buf != null) { combo = new double[buf.length + data.length]; System.arraycopy(buf, 0, combo, 0, buf.length); System.arraycopy(data, 0, combo, buf.length, data.length); } else { combo = data; } if (combo.length < winsz + winshift * MIN_SEGMENTS){ buf = combo; return null; } else { buf = null; } double[][] result = new double[(combo.length-winsz)/winshift+1][]; int i = 0; int j=0; while (i+winsz <= combo.length) { double[] seg = new double[winsz]; System.arraycopy(combo, i, seg, 0, winsz); result[j++] = seg; i+=winshift; } int bufsize = combo.length - i; if (bufsize > 0) { if (buf == null || buf.length != bufsize) { buf = new double[bufsize]; } System.arraycopy(combo, combo.length - bufsize, buf, 0, bufsize); } else { buf = null; } return result; } };
package talkhouse; public class Segmenter { private double[] buf=null; int winsz; int winshift; static final int MIN_SEGMENTS = 3; public Segmenter(int winsz, int winshift) { this.winsz = winsz; this.winshift = winshift; } public double[][] apply(double[] data) { double[] combo; if (buf != null) { combo = new double[buf.length + data.length]; System.arraycopy(buf, 0, combo, 0, buf.length); System.arraycopy(data, 0, combo, buf.length, data.length); } else { combo = data; } if (combo.length < winsz + winshift * MIN_SEGMENTS){ buf = combo; return null; } else { buf = null; } int rows = (combo.length - combo.length % winshift - winsz + winshift)/ winshift; double[][] result = new double[rows][]; for (int i=0;i<rows;++i) { double[] seg = new double[winsz]; System.arraycopy(combo, i*winshift, seg, 0, winsz); result[i] = seg; } int bufsize = combo.length - rows * winshift; if (bufsize > 0) { if (buf == null || buf.length != bufsize) { buf = new double[bufsize]; } System.arraycopy(combo, combo.length - bufsize, buf, 0, bufsize); } else { buf = null; } return result; } };
Update Java segmenter to use the same formula as the C version.
Update Java segmenter to use the same formula as the C version.
Java
bsd-2-clause
talkhouse/noyes,talkhouse/noyes,talkhouse/noyes
java
## Code Before: package talkhouse; public class Segmenter { private double[] buf=null; int winsz; int winshift; static final int MIN_SEGMENTS = 3; public Segmenter(int winsz, int winshift) { this.winsz = winsz; this.winshift = winshift; } public double[][] apply(double[] data) { double[] combo; if (buf != null) { combo = new double[buf.length + data.length]; System.arraycopy(buf, 0, combo, 0, buf.length); System.arraycopy(data, 0, combo, buf.length, data.length); } else { combo = data; } if (combo.length < winsz + winshift * MIN_SEGMENTS){ buf = combo; return null; } else { buf = null; } double[][] result = new double[(combo.length-winsz)/winshift+1][]; int i = 0; int j=0; while (i+winsz <= combo.length) { double[] seg = new double[winsz]; System.arraycopy(combo, i, seg, 0, winsz); result[j++] = seg; i+=winshift; } int bufsize = combo.length - i; if (bufsize > 0) { if (buf == null || buf.length != bufsize) { buf = new double[bufsize]; } System.arraycopy(combo, combo.length - bufsize, buf, 0, bufsize); } else { buf = null; } return result; } }; ## Instruction: Update Java segmenter to use the same formula as the C version. ## Code After: package talkhouse; public class Segmenter { private double[] buf=null; int winsz; int winshift; static final int MIN_SEGMENTS = 3; public Segmenter(int winsz, int winshift) { this.winsz = winsz; this.winshift = winshift; } public double[][] apply(double[] data) { double[] combo; if (buf != null) { combo = new double[buf.length + data.length]; System.arraycopy(buf, 0, combo, 0, buf.length); System.arraycopy(data, 0, combo, buf.length, data.length); } else { combo = data; } if (combo.length < winsz + winshift * MIN_SEGMENTS){ buf = combo; return null; } else { buf = null; } int rows = (combo.length - combo.length % winshift - winsz + winshift)/ winshift; double[][] result = new double[rows][]; for (int i=0;i<rows;++i) { double[] seg = new double[winsz]; System.arraycopy(combo, i*winshift, seg, 0, winsz); result[i] = seg; } int bufsize = combo.length - rows * winshift; if (bufsize > 0) { if (buf == null || buf.length != bufsize) { buf = new double[bufsize]; } System.arraycopy(combo, combo.length - bufsize, buf, 0, bufsize); } else { buf = null; } return result; } };
# ... existing code ... } else { buf = null; } int rows = (combo.length - combo.length % winshift - winsz + winshift)/ winshift; double[][] result = new double[rows][]; for (int i=0;i<rows;++i) { double[] seg = new double[winsz]; System.arraycopy(combo, i*winshift, seg, 0, winsz); result[i] = seg; } int bufsize = combo.length - rows * winshift; if (bufsize > 0) { if (buf == null || buf.length != bufsize) { buf = new double[bufsize]; # ... rest of the code ...
34015dbc34b2f4e44b104070bae8c3d1956d7e12
is_valid/wrapper_predicates.py
is_valid/wrapper_predicates.py
import json def is_transformed(transform, predicate, *args, exceptions=[ Exception ], msg='data can\'t be transformed', **kwargs): def is_valid(data, explain=False): try: data = transform(data, *args, **kwargs) except Exception as e: if not any(isinstance(e, exc) for exc in exceptions): raise e return (False, msg) if explain else False return predicate(data, explain=explain) return is_valid def is_json(predicate, *args, **kwargs): return is_transformed(json.loads, predicate, *args, exceptions=[ json.JSONDecodeError ], msg='data is not valid json', **kwargs)
import json def is_transformed(transform, predicate, *args, exceptions=[ Exception ], msg='data can\'t be transformed', **kwargs): def is_valid(data, explain=False, include=False): try: data = transform(data, *args, **kwargs) except Exception as e: if not any(isinstance(e, exc) for exc in exceptions): raise e return ( (False, msg, None) if explain else (False, None) ) if include else ( (False, msg) if explain else False ) return (( predicate(data, explain=True) + (data,) ) if explain else ( predicate(data), data )) if include else predicate(data, explain=explain) return is_valid def is_json(predicate, *args, **kwargs): return is_transformed(json.loads, predicate, *args, exceptions=[ json.JSONDecodeError ], msg='data is not valid json', **kwargs)
Add include keyword arg to is_transformed
Add include keyword arg to is_transformed
Python
mit
Daanvdk/is_valid
python
## Code Before: import json def is_transformed(transform, predicate, *args, exceptions=[ Exception ], msg='data can\'t be transformed', **kwargs): def is_valid(data, explain=False): try: data = transform(data, *args, **kwargs) except Exception as e: if not any(isinstance(e, exc) for exc in exceptions): raise e return (False, msg) if explain else False return predicate(data, explain=explain) return is_valid def is_json(predicate, *args, **kwargs): return is_transformed(json.loads, predicate, *args, exceptions=[ json.JSONDecodeError ], msg='data is not valid json', **kwargs) ## Instruction: Add include keyword arg to is_tranformed ## Code After: import json def is_transformed(transform, predicate, *args, exceptions=[ Exception ], msg='data can\'t be transformed', **kwargs): def is_valid(data, explain=False, include=False): try: data = transform(data, *args, **kwargs) except Exception as e: if not any(isinstance(e, exc) for exc in exceptions): raise e return ( (False, msg, None) if explain else (False, None) ) if include else ( (False, msg) if explain else False ) return (( predicate(data, explain=True) + (data,) ) if explain else ( predicate(data), data )) if include else predicate(data, explain=explain) return is_valid def is_json(predicate, *args, **kwargs): return is_transformed(json.loads, predicate, *args, exceptions=[ json.JSONDecodeError ], msg='data is not valid json', **kwargs)
# ... existing code ... def is_transformed(transform, predicate, *args, exceptions=[ Exception ], msg='data can\'t be transformed', **kwargs): def is_valid(data, explain=False, include=False): try: data = transform(data, *args, **kwargs) except Exception as e: if not any(isinstance(e, exc) for exc in exceptions): raise e return ( (False, msg, None) if explain else (False, None) ) if include else ( (False, msg) if explain else False ) return (( predicate(data, explain=True) + (data,) ) if explain else ( predicate(data), data )) if include else predicate(data, explain=explain) return is_valid # ... rest of the code ...
5c3a5855067b9015dd58eab240526b229c770c98
src/test/java/com/apruve/ApruveEnvironmentTest.java
src/test/java/com/apruve/ApruveEnvironmentTest.java
package com.apruve; import static org.junit.Assert.*; import org.junit.Test; public class ApruveEnvironmentTest { @Test public void testBaseUrl() { assertEquals("https://www.apruve.com", ApruveEnvironment.PROD.getBaseUrl()); assertEquals("https://test.apruve.com", ApruveEnvironment.TEST.getBaseUrl()); } @Test public void testApiUrl() { assertEquals("https://www.apruve.com/api/v3", ApruveEnvironment.PROD.getApiV3Url()); assertEquals("https://test.apruve.com/api/v3", ApruveEnvironment.TEST.getApiV3Url()); } @Test public void testJsUrl() { assertEquals("https://www.apruve.com/js/apruve.js", ApruveEnvironment.PROD.getJsUrl()); assertEquals("https://test.apruve.com/js/apruve.js", ApruveEnvironment.TEST.getJsUrl()); } @Test public void testJsTag() { assertEquals("<script src=\"https://www.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.PROD.getJsTag()); assertEquals("<script src=\"https://test.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.TEST.getJsTag()); } }
package com.apruve; import static org.junit.Assert.*; import org.junit.Test; public class ApruveEnvironmentTest { @Test public void testBaseUrl() { assertEquals("https://app.apruve.com", ApruveEnvironment.PROD.getBaseUrl()); assertEquals("https://test.apruve.com", ApruveEnvironment.TEST.getBaseUrl()); } @Test public void testApiUrl() { assertEquals("https://app.apruve.com/api/v3", ApruveEnvironment.PROD.getApiV3Url()); assertEquals("https://test.apruve.com/api/v3", ApruveEnvironment.TEST.getApiV3Url()); } @Test public void testJsUrl() { assertEquals("https://app.apruve.com/js/apruve.js", ApruveEnvironment.PROD.getJsUrl()); assertEquals("https://test.apruve.com/js/apruve.js", ApruveEnvironment.TEST.getJsUrl()); } @Test public void testJsTag() { assertEquals("<script src=\"https://app.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.PROD.getJsTag()); assertEquals("<script src=\"https://test.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.TEST.getJsTag()); } }
Use app.apruve.com as the prod environment URL
Use app.apruve.com as the prod environment URL
Java
mit
apruve/apruve-java
java
## Code Before: package com.apruve; import static org.junit.Assert.*; import org.junit.Test; public class ApruveEnvironmentTest { @Test public void testBaseUrl() { assertEquals("https://www.apruve.com", ApruveEnvironment.PROD.getBaseUrl()); assertEquals("https://test.apruve.com", ApruveEnvironment.TEST.getBaseUrl()); } @Test public void testApiUrl() { assertEquals("https://www.apruve.com/api/v3", ApruveEnvironment.PROD.getApiV3Url()); assertEquals("https://test.apruve.com/api/v3", ApruveEnvironment.TEST.getApiV3Url()); } @Test public void testJsUrl() { assertEquals("https://www.apruve.com/js/apruve.js", ApruveEnvironment.PROD.getJsUrl()); assertEquals("https://test.apruve.com/js/apruve.js", ApruveEnvironment.TEST.getJsUrl()); } @Test public void testJsTag() { assertEquals("<script src=\"https://www.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.PROD.getJsTag()); assertEquals("<script src=\"https://test.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.TEST.getJsTag()); } } ## Instruction: Use app.apruve.com as the prod environment URL ## Code After: package com.apruve; import static org.junit.Assert.*; import org.junit.Test; public class ApruveEnvironmentTest { @Test public void testBaseUrl() { assertEquals("https://app.apruve.com", ApruveEnvironment.PROD.getBaseUrl()); assertEquals("https://test.apruve.com", ApruveEnvironment.TEST.getBaseUrl()); } @Test public void testApiUrl() { assertEquals("https://app.apruve.com/api/v3", ApruveEnvironment.PROD.getApiV3Url()); assertEquals("https://test.apruve.com/api/v3", ApruveEnvironment.TEST.getApiV3Url()); } @Test public void testJsUrl() { assertEquals("https://app.apruve.com/js/apruve.js", ApruveEnvironment.PROD.getJsUrl()); assertEquals("https://test.apruve.com/js/apruve.js", ApruveEnvironment.TEST.getJsUrl()); } @Test public void testJsTag() { assertEquals("<script src=\"https://app.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.PROD.getJsTag()); assertEquals("<script src=\"https://test.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.TEST.getJsTag()); } }
// ... existing code ... @Test public void testBaseUrl() { assertEquals("https://app.apruve.com", ApruveEnvironment.PROD.getBaseUrl()); assertEquals("https://test.apruve.com", ApruveEnvironment.TEST.getBaseUrl()); } @Test public void testApiUrl() { assertEquals("https://app.apruve.com/api/v3", ApruveEnvironment.PROD.getApiV3Url()); assertEquals("https://test.apruve.com/api/v3", ApruveEnvironment.TEST.getApiV3Url()); } @Test public void testJsUrl() { assertEquals("https://app.apruve.com/js/apruve.js", ApruveEnvironment.PROD.getJsUrl()); assertEquals("https://test.apruve.com/js/apruve.js", ApruveEnvironment.TEST.getJsUrl()); } @Test public void testJsTag() { assertEquals("<script src=\"https://app.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.PROD.getJsTag()); assertEquals("<script src=\"https://test.apruve.com/js/apruve.js\" type=\"text/javascript\"></script>", ApruveEnvironment.TEST.getJsTag()); } } // ... rest of the code ...
59e2753ba40cbbe6dba4a8ca2ebc005945a2e8ee
modules/security/src/main/java/io/liveoak/security/spi/AuthzPolicyGroup.java
modules/security/src/main/java/io/liveoak/security/spi/AuthzPolicyGroup.java
package io.liveoak.security.spi; import java.util.ArrayList; import java.util.List; /** * @author Bob McWhirter */ public class AuthzPolicyGroup { public AuthzPolicyGroup() { this.entries = new ArrayList<>(); } public AuthzPolicyGroup(List<AuthzPolicyEntry> entries) { this.entries = entries; } public List<AuthzPolicyEntry> entries() { return this.entries; } public void entries(List<AuthzPolicyEntry> entries) { this.entries = entries; } private List<AuthzPolicyEntry> entries; }
package io.liveoak.security.spi; import java.util.ArrayList; import java.util.List; import com.fasterxml.jackson.annotation.JsonProperty; /** * @author Bob McWhirter */ public class AuthzPolicyGroup { public AuthzPolicyGroup() { this.entries = new ArrayList<>(); } public AuthzPolicyGroup(List<AuthzPolicyEntry> entries) { this.entries = entries; } public List<AuthzPolicyEntry> entries() { return this.entries; } public void entries(List<AuthzPolicyEntry> entries) { this.entries = entries; } @JsonProperty("policies") private List<AuthzPolicyEntry> entries; }
Fix root element of authz-config.json to be 'policies'
Fix root element of authz-config.json to be 'policies'
Java
epl-1.0
kyroskoh/liveoak,kyroskoh/liveoak,liveoak-io/liveoak,ljshj/liveoak,ammendonca/liveoak,kyroskoh/liveoak,liveoak-io/liveoak,liveoak-io/liveoak,ljshj/liveoak,ljshj/liveoak,ammendonca/liveoak,ljshj/liveoak,ammendonca/liveoak,ammendonca/liveoak,liveoak-io/liveoak,kyroskoh/liveoak
java
## Code Before: package io.liveoak.security.spi; import java.util.ArrayList; import java.util.List; /** * @author Bob McWhirter */ public class AuthzPolicyGroup { public AuthzPolicyGroup() { this.entries = new ArrayList<>(); } public AuthzPolicyGroup(List<AuthzPolicyEntry> entries) { this.entries = entries; } public List<AuthzPolicyEntry> entries() { return this.entries; } public void entries(List<AuthzPolicyEntry> entries) { this.entries = entries; } private List<AuthzPolicyEntry> entries; } ## Instruction: Fix root element of authz-config.json to be 'policies' ## Code After: package io.liveoak.security.spi; import java.util.ArrayList; import java.util.List; import com.fasterxml.jackson.annotation.JsonProperty; /** * @author Bob McWhirter */ public class AuthzPolicyGroup { public AuthzPolicyGroup() { this.entries = new ArrayList<>(); } public AuthzPolicyGroup(List<AuthzPolicyEntry> entries) { this.entries = entries; } public List<AuthzPolicyEntry> entries() { return this.entries; } public void entries(List<AuthzPolicyEntry> entries) { this.entries = entries; } @JsonProperty("policies") private List<AuthzPolicyEntry> entries; }
# ... existing code ... import java.util.ArrayList; import java.util.List; import com.fasterxml.jackson.annotation.JsonProperty; /** * @author Bob McWhirter # ... modified code ... this.entries = entries; } @JsonProperty("policies") private List<AuthzPolicyEntry> entries; } # ... rest of the code ...
e2971406eb3b2ecdd211b9e7403fa02ae725b115
misc/miscfn.h
misc/miscfn.h
char *realpath(const char *path, char resolved_path []); #endif #if NEED_TIMEZONE #include <sys/stdtypes.h> extern time_t timezone; #endif #if NEED_MYREALLOC #include <sys/stdtypes.h> #define realloc(ptr,size) myrealloc(ptr,size) extern void *myrealloc(void *, size_t); #endif #if HAVE_SYS_SOCKET_H #include <sys/socket.h> #endif #endif
char *realpath(const char *path, char resolved_path []); #endif #if NEED_TIMEZONE #include <sys/stdtypes.h> extern time_t timezone; #endif #if NEED_MYREALLOC #include <sys/stdtypes.h> #define realloc(ptr,size) myrealloc(ptr,size) extern void *myrealloc(void *, size_t); #endif #if HAVE_SYS_SOCKET_H #include <sys/socket.h> #endif #if HAVE_LIMITS_H #include <limits.h> #endif #endif
Include <limits.h> if it's available.
Include <limits.h> if it's available.
C
lgpl-2.1
devzero2000/RPM5,devzero2000/RPM5,devzero2000/RPM5,devzero2000/RPM5,devzero2000/RPM5,devzero2000/RPM5,devzero2000/RPM5
c
## Code Before: char *realpath(const char *path, char resolved_path []); #endif #if NEED_TIMEZONE #include <sys/stdtypes.h> extern time_t timezone; #endif #if NEED_MYREALLOC #include <sys/stdtypes.h> #define realloc(ptr,size) myrealloc(ptr,size) extern void *myrealloc(void *, size_t); #endif #if HAVE_SYS_SOCKET_H #include <sys/socket.h> #endif #endif ## Instruction: Include <limits.h> if it's available. ## Code After: char *realpath(const char *path, char resolved_path []); #endif #if NEED_TIMEZONE #include <sys/stdtypes.h> extern time_t timezone; #endif #if NEED_MYREALLOC #include <sys/stdtypes.h> #define realloc(ptr,size) myrealloc(ptr,size) extern void *myrealloc(void *, size_t); #endif #if HAVE_SYS_SOCKET_H #include <sys/socket.h> #endif #if HAVE_LIMITS_H #include <limits.h> #endif #endif
... #include <sys/socket.h> #endif #if HAVE_LIMITS_H #include <limits.h> #endif #endif ...
7a30eef3673621b57a3dce3e7adced48b3944a08
konashi-android-sdk/src/main/java/com/uxxu/konashi/lib/KonashiErrorType.java
konashi-android-sdk/src/main/java/com/uxxu/konashi/lib/KonashiErrorType.java
package com.uxxu.konashi.lib; import info.izumin.android.bletia.BletiaErrorType; /** * Created by izumin on 9/16/15. */ public enum KonashiErrorType implements BletiaErrorType{ INVALID_PARAMETER, NOT_ENABLED_UART, NOT_ENABLED_I2C; @Override public int getCode() { return -1; } @Override public String getName() { return name(); } }
package com.uxxu.konashi.lib; import info.izumin.android.bletia.BletiaErrorType; /** * Created by izumin on 9/16/15. */ public enum KonashiErrorType implements BletiaErrorType{ INVALID_PARAMETER, NOT_ENABLED_UART, NOT_ENABLED_I2C, INVALID_PIN_NUMBER, INVALID_MODE, INVALID_PULLUP_PARAM, INVALID_OUTPUT_PARAM, DATA_SIZE_TOO_LONG, INVALID_BAUDRATE, NO_ERROR; @Override public int getCode() { return -1; } @Override public String getName() { return name(); } }
Add ErrorType values referred to in issue
Add ErrorType values referred to in issue
Java
apache-2.0
YUKAI/konashi-android-sdk,kiryuxxu/konashi-android-sdk,YUKAI/konashi-android-sdk
java
## Code Before: package com.uxxu.konashi.lib; import info.izumin.android.bletia.BletiaErrorType; /** * Created by izumin on 9/16/15. */ public enum KonashiErrorType implements BletiaErrorType{ INVALID_PARAMETER, NOT_ENABLED_UART, NOT_ENABLED_I2C; @Override public int getCode() { return -1; } @Override public String getName() { return name(); } } ## Instruction: Add ErrorType refer to issue ## Code After: package com.uxxu.konashi.lib; import info.izumin.android.bletia.BletiaErrorType; /** * Created by izumin on 9/16/15. */ public enum KonashiErrorType implements BletiaErrorType{ INVALID_PARAMETER, NOT_ENABLED_UART, NOT_ENABLED_I2C, INVALID_PIN_NUMBER, INVALID_MODE, INVALID_PULLUP_PARAM, INVALID_OUTPUT_PARAM, DATA_SIZE_TOO_LONG, INVALID_BAUDRATE, NO_ERROR; @Override public int getCode() { return -1; } @Override public String getName() { return name(); } }
... public enum KonashiErrorType implements BletiaErrorType{ INVALID_PARAMETER, NOT_ENABLED_UART, NOT_ENABLED_I2C, INVALID_PIN_NUMBER, INVALID_MODE, INVALID_PULLUP_PARAM, INVALID_OUTPUT_PARAM, DATA_SIZE_TOO_LONG, INVALID_BAUDRATE, NO_ERROR; @Override public int getCode() { ...
34db4460aa67fc9abfaaaf2c48a6ea7c5b801ff0
examples/libtest/imports/__init__.py
examples/libtest/imports/__init__.py
exec_order = [] class Imports(object): exec_order = exec_order def __init__(self): self.v = 1 imports = Imports() overrideme = "not overridden" from . import cls as loccls from .imports import cls as upcls def conditional_func(): return "not overridden" if True: def conditional_func(): return "overridden"
exec_order = [] class Imports(object): exec_order = exec_order def __init__(self): self.v = 1 imports = Imports() overrideme = "not overridden" from . import cls as loccls # This is not valid since Python 2.6! try: from .imports import cls as upcls except ImportError: upcls = loccls def conditional_func(): return "not overridden" if True: def conditional_func(): return "overridden"
Fix for libtest for cpython 2.6 / jython / pypy
Fix for libtest for cpython 2.6 / jython / pypy
Python
apache-2.0
spaceone/pyjs,minghuascode/pyj,pombredanne/pyjs,lancezlin/pyjs,Hasimir/pyjs,gpitel/pyjs,pyjs/pyjs,anandology/pyjamas,gpitel/pyjs,pombredanne/pyjs,minghuascode/pyj,minghuascode/pyj,lancezlin/pyjs,pyjs/pyjs,pyjs/pyjs,pyjs/pyjs,minghuascode/pyj,spaceone/pyjs,spaceone/pyjs,anandology/pyjamas,Hasimir/pyjs,pombredanne/pyjs,pombredanne/pyjs,gpitel/pyjs,anandology/pyjamas,Hasimir/pyjs,spaceone/pyjs,Hasimir/pyjs,lancezlin/pyjs,lancezlin/pyjs,anandology/pyjamas,gpitel/pyjs
python
## Code Before: exec_order = [] class Imports(object): exec_order = exec_order def __init__(self): self.v = 1 imports = Imports() overrideme = "not overridden" from . import cls as loccls from .imports import cls as upcls def conditional_func(): return "not overridden" if True: def conditional_func(): return "overridden" ## Instruction: Fix for libtest for cpython 2.6 / jython / pypy ## Code After: exec_order = [] class Imports(object): exec_order = exec_order def __init__(self): self.v = 1 imports = Imports() overrideme = "not overridden" from . import cls as loccls # This is not valid since Python 2.6! try: from .imports import cls as upcls except ImportError: upcls = loccls def conditional_func(): return "not overridden" if True: def conditional_func(): return "overridden"
... overrideme = "not overridden" from . import cls as loccls # This is not valid since Python 2.6! try: from .imports import cls as upcls except ImportError: upcls = loccls def conditional_func(): return "not overridden" ...
dc978c140f7905aa7604a346baea53a3be6ec0de
src/test/java/net/sf/jabref/logic/AuthorListTest.java
src/test/java/net/sf/jabref/logic/AuthorListTest.java
package net.sf.jabref.logic; import net.sf.jabref.export.layout.format.CreateDocBookAuthors; import org.junit.Test; import static org.junit.Assert.assertEquals; public class AuthorListTest { @Test public void authorListTest() { String authorString = "Olaf von Nilsen, Jr."; AuthorList authorList = AuthorList.getAuthorList(authorString); for (int i = 0; i < authorList.size(); i++) { AuthorList.Author author = authorList.getAuthor(i); assertEquals("Jr.", author.getFirst()); assertEquals("Olaf von Nilsen", author.getLast()); assertEquals(null, author.getJr()); assertEquals(null, author.getVon()); } assertEquals("<author><firstname>Jr.</firstname><surname>Olaf von Nilsen</surname></author>", new CreateDocBookAuthors().format(authorString)); } }
package net.sf.jabref.logic; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; public class AuthorListTest { @Ignore @Test public void authorListTest() { String authorString = "Olaf von Nilsen, Jr."; AuthorList authorList = AuthorList.getAuthorList(authorString); for (int i = 0; i < authorList.size(); i++) { AuthorList.Author author = authorList.getAuthor(i); assertEquals("Olaf", author.getFirst()); assertEquals("Nilsen", author.getLast()); assertEquals("Jr.", author.getJr()); assertEquals("von", author.getVon()); } } }
Fix and disable AuthorList test case
Fix and disable AuthorList test case. The parsing logic in AuthorList.Author is broken and also terribly written (even IntelliJ complains that it is too complex for its dataflow analyzer). Until somebody has the will to fix this, there is no point in executing this test.
Java
mit
JabRef/jabref,jhshinn/jabref,mredaelli/jabref,Mr-DLib/jabref,jhshinn/jabref,ayanai1/jabref,mredaelli/jabref,Siedlerchr/jabref,zellerdev/jabref,Mr-DLib/jabref,grimes2/jabref,tschechlovdev/jabref,oscargus/jabref,Braunch/jabref,Braunch/jabref,bartsch-dev/jabref,tschechlovdev/jabref,mairdl/jabref,motokito/jabref,jhshinn/jabref,JabRef/jabref,Braunch/jabref,mairdl/jabref,tschechlovdev/jabref,zellerdev/jabref,JabRef/jabref,grimes2/jabref,grimes2/jabref,sauliusg/jabref,motokito/jabref,motokito/jabref,tobiasdiez/jabref,mairdl/jabref,Braunch/jabref,sauliusg/jabref,bartsch-dev/jabref,obraliar/jabref,oscargus/jabref,zellerdev/jabref,grimes2/jabref,tschechlovdev/jabref,ayanai1/jabref,zellerdev/jabref,Mr-DLib/jabref,obraliar/jabref,jhshinn/jabref,jhshinn/jabref,shitikanth/jabref,tobiasdiez/jabref,bartsch-dev/jabref,shitikanth/jabref,Mr-DLib/jabref,mredaelli/jabref,grimes2/jabref,obraliar/jabref,obraliar/jabref,shitikanth/jabref,sauliusg/jabref,Braunch/jabref,Mr-DLib/jabref,motokito/jabref,bartsch-dev/jabref,mairdl/jabref,oscargus/jabref,mredaelli/jabref,shitikanth/jabref,tschechlovdev/jabref,motokito/jabref,obraliar/jabref,oscargus/jabref,Siedlerchr/jabref,ayanai1/jabref,shitikanth/jabref,tobiasdiez/jabref,tobiasdiez/jabref,mairdl/jabref,zellerdev/jabref,mredaelli/jabref,Siedlerchr/jabref,sauliusg/jabref,JabRef/jabref,Siedlerchr/jabref,oscargus/jabref,bartsch-dev/jabref,ayanai1/jabref,ayanai1/jabref
java
## Code Before: package net.sf.jabref.logic; import net.sf.jabref.export.layout.format.CreateDocBookAuthors; import org.junit.Test; import static org.junit.Assert.assertEquals; public class AuthorListTest { @Test public void authorListTest() { String authorString = "Olaf von Nilsen, Jr."; AuthorList authorList = AuthorList.getAuthorList(authorString); for (int i = 0; i < authorList.size(); i++) { AuthorList.Author author = authorList.getAuthor(i); assertEquals("Jr.", author.getFirst()); assertEquals("Olaf von Nilsen", author.getLast()); assertEquals(null, author.getJr()); assertEquals(null, author.getVon()); } assertEquals("<author><firstname>Jr.</firstname><surname>Olaf von Nilsen</surname></author>", new CreateDocBookAuthors().format(authorString)); } } ## Instruction: Fix and disable AuthorList test case The parsing logic in AuthorList.Author is broken and also terribly written (Even Intellij complains that it is too complex for its dataflow analyzer). Until somebody has the will to fix this, there is no point in executing this test. ## Code After: package net.sf.jabref.logic; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; public class AuthorListTest { @Ignore @Test public void authorListTest() { String authorString = "Olaf von Nilsen, Jr."; AuthorList authorList = AuthorList.getAuthorList(authorString); for (int i = 0; i < authorList.size(); i++) { AuthorList.Author author = authorList.getAuthor(i); assertEquals("Olaf", author.getFirst()); assertEquals("Nilsen", author.getLast()); assertEquals("Jr.", author.getJr()); assertEquals("von", author.getVon()); } } }
# ... existing code ... package net.sf.jabref.logic; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; # ... modified code ... public class AuthorListTest { @Ignore @Test public void authorListTest() { String authorString = "Olaf von Nilsen, Jr."; ... AuthorList authorList = AuthorList.getAuthorList(authorString); for (int i = 0; i < authorList.size(); i++) { AuthorList.Author author = authorList.getAuthor(i); assertEquals("Olaf", author.getFirst()); assertEquals("Nilsen", author.getLast()); assertEquals("Jr.", author.getJr()); assertEquals("von", author.getVon()); } } } # ... rest of the code ...
8c23ad1877a0af91e1b9a8512aa7476852de205c
kombu_fernet/serializers/__init__.py
kombu_fernet/serializers/__init__.py
from __future__ import unicode_literals, absolute_import import os from cryptography.fernet import Fernet, InvalidToken fernet = Fernet(os.environ['KOMBU_FERNET_KEY']) fallback_fernet = None try: fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY']) except KeyError: pass def fernet_encode(func): def inner(message): return fernet.encrypt(func(message)) return inner def fernet_decode(func): def inner(encoded_message): if isinstance(encoded_message, unicode): encoded_message = encoded_message.encode('utf-8') try: message = fernet.decrypt(encoded_message) except InvalidToken: message = fallback_fernet.decrypt(encoded_message) return func(message) return inner
from __future__ import unicode_literals, absolute_import import os from cryptography.fernet import Fernet, MultiFernet fernet = Fernet(os.environ['KOMBU_FERNET_KEY']) fallback_fernet = None try: fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY']) except KeyError: pass else: fernet = MultiFernet([fernet, fallback_fernet]) def fernet_encode(func): def inner(message): return fernet.encrypt(func(message)) return inner def fernet_decode(func): def inner(encoded_message): if isinstance(encoded_message, unicode): encoded_message = encoded_message.encode('utf-8') message = fernet.decrypt(encoded_message) return func(message) return inner
Use MultiFernet provided by cryptography lib
Use MultiFernet provided by cryptography lib. Closes #9
Python
mit
heroku/kombu-fernet-serializers
python
## Code Before: from __future__ import unicode_literals, absolute_import import os from cryptography.fernet import Fernet, InvalidToken fernet = Fernet(os.environ['KOMBU_FERNET_KEY']) fallback_fernet = None try: fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY']) except KeyError: pass def fernet_encode(func): def inner(message): return fernet.encrypt(func(message)) return inner def fernet_decode(func): def inner(encoded_message): if isinstance(encoded_message, unicode): encoded_message = encoded_message.encode('utf-8') try: message = fernet.decrypt(encoded_message) except InvalidToken: message = fallback_fernet.decrypt(encoded_message) return func(message) return inner ## Instruction: Use MultiFernet provided by cryptography lib Closes #9 ## Code After: from __future__ import unicode_literals, absolute_import import os from cryptography.fernet import Fernet, MultiFernet fernet = Fernet(os.environ['KOMBU_FERNET_KEY']) fallback_fernet = None try: fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY']) except KeyError: pass else: fernet = MultiFernet([fernet, fallback_fernet]) def fernet_encode(func): def inner(message): return fernet.encrypt(func(message)) return inner def fernet_decode(func): def inner(encoded_message): if isinstance(encoded_message, unicode): encoded_message = encoded_message.encode('utf-8') message = fernet.decrypt(encoded_message) return func(message) return inner
... from __future__ import unicode_literals, absolute_import import os from cryptography.fernet import Fernet, MultiFernet fernet = Fernet(os.environ['KOMBU_FERNET_KEY']) fallback_fernet = None ... fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY']) except KeyError: pass else: fernet = MultiFernet([fernet, fallback_fernet]) def fernet_encode(func): ... def inner(encoded_message): if isinstance(encoded_message, unicode): encoded_message = encoded_message.encode('utf-8') message = fernet.decrypt(encoded_message) return func(message) return inner ...
4ec2672dc22c3477984e335e3678f3a2e69ecbd2
wger/exercises/migrations/0018_delete_pending_exercises.py
wger/exercises/migrations/0018_delete_pending_exercises.py
from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [migrations.RunPython(delete_pending_exercises)]
from django.db import migrations from django.conf import settings def delete_pending_bases(apps, schema_editor): """ Delete all pending bases Note that we can't access STATUS_PENDING here because we are not using a real model. """ Base = apps.get_model("exercises", "ExerciseBase") Base.objects.filter(status='1').delete() def delete_pending_translations(apps, schema_editor): """ Delete all pending translations Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_bases), migrations.RunPython(delete_pending_translations), ]
Delete both pending bases and translations
Delete both pending bases and translations
Python
agpl-3.0
wger-project/wger,wger-project/wger,wger-project/wger,wger-project/wger
python
## Code Before: from django.db import migrations from django.conf import settings def delete_pending_exercises(apps, schema_editor): """ Delete all pending exercises Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "ExerciseBase") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [migrations.RunPython(delete_pending_exercises)] ## Instruction: Delete both pending bases and translations ## Code After: from django.db import migrations from django.conf import settings def delete_pending_bases(apps, schema_editor): """ Delete all pending bases Note that we can't access STATUS_PENDING here because we are not using a real model. """ Base = apps.get_model("exercises", "ExerciseBase") Base.objects.filter(status='1').delete() def delete_pending_translations(apps, schema_editor): """ Delete all pending translations Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0014_merge_20210818_1735'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_bases), migrations.RunPython(delete_pending_translations), ]
// ... existing code ... from django.conf import settings def delete_pending_bases(apps, schema_editor): """ Delete all pending bases Note that we can't access STATUS_PENDING here because we are not using a real model. """ Base = apps.get_model("exercises", "ExerciseBase") Base.objects.filter(status='1').delete() def delete_pending_translations(apps, schema_editor): """ Delete all pending translations Note that we can't access STATUS_PENDING here because we are not using a real model. """ Exercise = apps.get_model("exercises", "Exercise") Exercise.objects.filter(status='1').delete() // ... modified code ... ('exercises', '0017_muscle_name_en'), ] operations = [ migrations.RunPython(delete_pending_bases), migrations.RunPython(delete_pending_translations), ] // ... rest of the code ...
8e4248cc73e4c4f1531c626dfda688224045c148
setup.py
setup.py
from setuptools import setup setup( name='lace', version='0.1.1', description='Neural Learning to Rank using Chainer', url='https://github.com/rjagerman/lace', download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz', author='Rolf Jagerman', author_email='[email protected]', license='MIT', packages=['lace'] )
from setuptools import setup setup( name='lace', version='0.1.1', description='Neural Learning to Rank using Chainer', url='https://github.com/rjagerman/lace', download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz', author='Rolf Jagerman', author_email='[email protected]', license='MIT', packages=['lace', 'lace.functions', 'lace.loss'], install_requires=['numpy>=1.12.0', 'chainer>=2.0.0'], tests_require=['nose'] )
Add correct packages and requirements for python package
Add correct packages and requirements for python package
Python
mit
rjagerman/shoelace
python
## Code Before: from setuptools import setup setup( name='lace', version='0.1.1', description='Neural Learning to Rank using Chainer', url='https://github.com/rjagerman/lace', download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz', author='Rolf Jagerman', author_email='[email protected]', license='MIT', packages=['lace'] ) ## Instruction: Add correct packages and requirements for python package ## Code After: from setuptools import setup setup( name='lace', version='0.1.1', description='Neural Learning to Rank using Chainer', url='https://github.com/rjagerman/lace', download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz', author='Rolf Jagerman', author_email='[email protected]', license='MIT', packages=['lace', 'lace.functions', 'lace.loss'], install_requires=['numpy>=1.12.0', 'chainer>=2.0.0'], tests_require=['nose'] )
... author='Rolf Jagerman', author_email='[email protected]', license='MIT', packages=['lace', 'lace.functions', 'lace.loss'], install_requires=['numpy>=1.12.0', 'chainer>=2.0.0'], tests_require=['nose'] ) ...
ea5bfe240cc349144e089f606534726863f2c21b
media/sites/lapinkansa.py
media/sites/lapinkansa.py
import requests from bs4 import BeautifulSoup def nouda( url , out ): r = requests.get( url ) r.encoding = 'UTF-8' soup = BeautifulSoup( r.text ) teksti = soup.find_all( class_ ='post-meta' ) for string in teksti[0].stripped_strings: out.write( string.encode('utf8') + ' ' ) if __name__ == '__main__': nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
import requests from bs4 import BeautifulSoup def nouda( url , out ): r = requests.get( url ) r.encoding = 'UTF-8' soup = BeautifulSoup( r.text ) teksti = soup.find_all( class_ = 'news-excerpt' ) for string in teksti[0].stripped_strings: out.write( string.encode('utf8') + ' ' ) if __name__ == '__main__': nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
Fix to new page layout
Fix to new page layout
Python
mit
HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015
python
## Code Before: import requests from bs4 import BeautifulSoup def nouda( url , out ): r = requests.get( url ) r.encoding = 'UTF-8' soup = BeautifulSoup( r.text ) teksti = soup.find_all( class_ ='post-meta' ) for string in teksti[0].stripped_strings: out.write( string.encode('utf8') + ' ' ) if __name__ == '__main__': nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w')) ## Instruction: Fix to new page layout ## Code After: import requests from bs4 import BeautifulSoup def nouda( url , out ): r = requests.get( url ) r.encoding = 'UTF-8' soup = BeautifulSoup( r.text ) teksti = soup.find_all( class_ = 'news-excerpt' ) for string in teksti[0].stripped_strings: out.write( string.encode('utf8') + ' ' ) if __name__ == '__main__': nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
// ... existing code ... r.encoding = 'UTF-8' soup = BeautifulSoup( r.text ) teksti = soup.find_all( class_ = 'news-excerpt' ) for string in teksti[0].stripped_strings: out.write( string.encode('utf8') + ' ' ) // ... rest of the code ...
ae433a0ed222d3540581b2b49c9a49a8ad16819c
wagtailaltgenerator/translation_providers/google_translate.py
wagtailaltgenerator/translation_providers/google_translate.py
import logging from . import AbstractTranslationProvider from google.cloud import translate logger = logging.getLogger(__name__) class GoogleTranslate(AbstractTranslationProvider): def translate(self, strings, target_language, source_language="en"): client = translate.Client() response = client.translate( strings, source_language=source_language, target_language=target_language ) return list(map(lambda x: x["translatedText"], response))
import logging from . import AbstractTranslationProvider logger = logging.getLogger(__name__) class GoogleTranslate(AbstractTranslationProvider): def translate(self, strings, target_language, source_language="en"): from google.cloud import translate client = translate.Client() response = client.translate( strings, source_language=source_language, target_language=target_language ) return list(map(lambda x: x["translatedText"], response))
Enable test mocking for translate
Enable test mocking for translate
Python
mit
marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator
python
## Code Before: import logging from . import AbstractTranslationProvider from google.cloud import translate logger = logging.getLogger(__name__) class GoogleTranslate(AbstractTranslationProvider): def translate(self, strings, target_language, source_language="en"): client = translate.Client() response = client.translate( strings, source_language=source_language, target_language=target_language ) return list(map(lambda x: x["translatedText"], response)) ## Instruction: Enable test mocking for translate ## Code After: import logging from . import AbstractTranslationProvider logger = logging.getLogger(__name__) class GoogleTranslate(AbstractTranslationProvider): def translate(self, strings, target_language, source_language="en"): from google.cloud import translate client = translate.Client() response = client.translate( strings, source_language=source_language, target_language=target_language ) return list(map(lambda x: x["translatedText"], response))
... import logging from . import AbstractTranslationProvider logger = logging.getLogger(__name__) ... class GoogleTranslate(AbstractTranslationProvider): def translate(self, strings, target_language, source_language="en"): from google.cloud import translate client = translate.Client() response = client.translate( strings, source_language=source_language, target_language=target_language ...
6d11ff1845db8720bdb299c3d53008f6297ab037
src/main/java/com/fundynamic/d2tm/math/Random.java
src/main/java/com/fundynamic/d2tm/math/Random.java
package com.fundynamic.d2tm.math; import com.fundynamic.d2tm.game.map.Cell; import com.fundynamic.d2tm.game.terrain.Terrain; import java.util.List; public class Random { /** * Returns value between min *until* max, not including max. * * Example, if you want a number between 0 or 1, you do: getRandomBetween(0, 2) * * @param min * @param max * @return */ public static int getRandomBetween(int min, int max) { final int maxForRandom = max - min; return (min + getInt(maxForRandom)); } public static int getInt(int max) { return (int) (Math.random() * max); } /** * Given a list of T, returns a random item from the list. * @param list * @param <T> * @return */ public static <T> T getRandomItem(List<T> list) { if (list.isEmpty()) return null; return list.get(getInt(list.size())); } }
package com.fundynamic.d2tm.math; import java.util.List; public class Random { private static java.util.Random random = new java.util.Random(); /** * Returns value between min *until* max, not including max. * * Example, if you want a number between 0 or 1, you do: getRandomBetween(0, 2) * * @param min * @param max * @return */ public static int getRandomBetween(int min, int max) { final int maxForRandom = max - min; return (min + getInt(maxForRandom)); } public static int getInt(int max) { return random.nextInt(max); } /** * Given a list of T, returns a random item from the list. * @param list * @param <T> * @return */ public static <T> T getRandomItem(List<T> list) { if (list.isEmpty()) return null; return list.get(getInt(list.size())); } }
Make it more random as suggested by Mark Platvoet
Make it more random as suggested by Mark Platvoet
Java
mit
Fundynamic/dune2themaker4j,Fundynamic/dune2themaker4j
java
## Code Before: package com.fundynamic.d2tm.math; import com.fundynamic.d2tm.game.map.Cell; import com.fundynamic.d2tm.game.terrain.Terrain; import java.util.List; public class Random { /** * Returns value between min *until* max, not including max. * * Example, if you want a number between 0 or 1, you do: getRandomBetween(0, 2) * * @param min * @param max * @return */ public static int getRandomBetween(int min, int max) { final int maxForRandom = max - min; return (min + getInt(maxForRandom)); } public static int getInt(int max) { return (int) (Math.random() * max); } /** * Given a list of T, returns a random item from the list. * @param list * @param <T> * @return */ public static <T> T getRandomItem(List<T> list) { if (list.isEmpty()) return null; return list.get(getInt(list.size())); } } ## Instruction: Make it more random as suggested by Mark Platvoet ## Code After: package com.fundynamic.d2tm.math; import java.util.List; public class Random { private static java.util.Random random = new java.util.Random(); /** * Returns value between min *until* max, not including max. * * Example, if you want a number between 0 or 1, you do: getRandomBetween(0, 2) * * @param min * @param max * @return */ public static int getRandomBetween(int min, int max) { final int maxForRandom = max - min; return (min + getInt(maxForRandom)); } public static int getInt(int max) { return random.nextInt(max); } /** * Given a list of T, returns a random item from the list. * @param list * @param <T> * @return */ public static <T> T getRandomItem(List<T> list) { if (list.isEmpty()) return null; return list.get(getInt(list.size())); } }
# ... existing code ... package com.fundynamic.d2tm.math; import java.util.List; public class Random { private static java.util.Random random = new java.util.Random(); /** * Returns value between min *until* max, not including max. * # ... modified code ... } public static int getInt(int max) { return random.nextInt(max); } /** # ... rest of the code ...
a0aa74d9e6295e34f02b4eefd76e7eb9a1e6425f
node/floor_divide.py
node/floor_divide.py
from nodes import Node class FloorDiv(Node): char = "f" args = 2 results = 1 @Node.test_func([3,2], [1]) @Node.test_func([6,-3], [-2]) def func(self, a:Node.number,b:Node.number): """a/b. Rounds down, returns an int.""" return a//b @Node.test_func(["test", "e"], [["t", "e", "st"]]) def partition(self, string:str, sep:str): """Split the string at the first occurrence of sep, return a 3-list containing the part before the separator, the separator itself, and the part after the separator. If the separator is not found, return a 3-list containing the string itself, followed by two empty strings.""" return [list(string.partition(sep))]
from nodes import Node class FloorDiv(Node): char = "f" args = 2 results = 1 @Node.test_func([3,2], [1]) @Node.test_func([6,-3], [-2]) def func(self, a:Node.number,b:Node.number): """a/b. Rounds down, returns an int.""" return a//b @Node.test_func(["test", "e"], [["t", "e", "st"]]) def partition(self, string:str, sep:str): """Split the string at the first occurrence of sep, return a 3-list containing the part before the separator, the separator itself, and the part after the separator. If the separator is not found, return a 3-list containing the string itself, followed by two empty strings.""" return [list(string.partition(sep))] @Node.test_func(["134", 1], [["134"]]) @Node.test_func(["1234", 2], [["12", "34"]]) @Node.test_func(["1234", 3], [["1", "2", "34"]]) @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]]) def chunk(self, inp:Node.indexable, num:Node.number): """Return inp seperated into num groups""" rtn = [] last = 0 size = len(inp)//num for i in range(size, len(inp), size): rtn.append(inp[last:i]) last = i if len(rtn) != num: rtn.append(inp[last:]) else: rtn[-1] += inp[last:] if len(rtn): if isinstance(inp, str): rtn[-1] = "".join(rtn[-1]) else: rtn[-1] = type(inp)(rtn[-1]) return [rtn]
Add a group chunk that chunks a list into N groups
Add a group chunk that chunks a list into N groups
Python
mit
muddyfish/PYKE,muddyfish/PYKE
python
## Code Before: from nodes import Node class FloorDiv(Node): char = "f" args = 2 results = 1 @Node.test_func([3,2], [1]) @Node.test_func([6,-3], [-2]) def func(self, a:Node.number,b:Node.number): """a/b. Rounds down, returns an int.""" return a//b @Node.test_func(["test", "e"], [["t", "e", "st"]]) def partition(self, string:str, sep:str): """Split the string at the first occurrence of sep, return a 3-list containing the part before the separator, the separator itself, and the part after the separator. If the separator is not found, return a 3-list containing the string itself, followed by two empty strings.""" return [list(string.partition(sep))] ## Instruction: Add a group chunk, chunks a list into N groups ## Code After: from nodes import Node class FloorDiv(Node): char = "f" args = 2 results = 1 @Node.test_func([3,2], [1]) @Node.test_func([6,-3], [-2]) def func(self, a:Node.number,b:Node.number): """a/b. Rounds down, returns an int.""" return a//b @Node.test_func(["test", "e"], [["t", "e", "st"]]) def partition(self, string:str, sep:str): """Split the string at the first occurrence of sep, return a 3-list containing the part before the separator, the separator itself, and the part after the separator. If the separator is not found, return a 3-list containing the string itself, followed by two empty strings.""" return [list(string.partition(sep))] @Node.test_func(["134", 1], [["134"]]) @Node.test_func(["1234", 2], [["12", "34"]]) @Node.test_func(["1234", 3], [["1", "2", "34"]]) @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]]) def chunk(self, inp:Node.indexable, num:Node.number): """Return inp seperated into num groups""" rtn = [] last = 0 size = len(inp)//num for i in range(size, len(inp), size): rtn.append(inp[last:i]) last = i if len(rtn) != num: rtn.append(inp[last:]) else: rtn[-1] += inp[last:] if len(rtn): if isinstance(inp, str): rtn[-1] = "".join(rtn[-1]) else: rtn[-1] = type(inp)(rtn[-1]) return [rtn]
// ... existing code ... return a 3-list containing the string itself, followed by two empty strings.""" return [list(string.partition(sep))] @Node.test_func(["134", 1], [["134"]]) @Node.test_func(["1234", 2], [["12", "34"]]) @Node.test_func(["1234", 3], [["1", "2", "34"]]) @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]]) def chunk(self, inp:Node.indexable, num:Node.number): """Return inp seperated into num groups""" rtn = [] last = 0 size = len(inp)//num for i in range(size, len(inp), size): rtn.append(inp[last:i]) last = i if len(rtn) != num: rtn.append(inp[last:]) else: rtn[-1] += inp[last:] if len(rtn): if isinstance(inp, str): rtn[-1] = "".join(rtn[-1]) else: rtn[-1] = type(inp)(rtn[-1]) return [rtn] // ... rest of the code ...
01a71f10f94d9e7b7c90d19540df8015455ae2ad
commands/say.py
commands/say.py
from CommandTemplate import CommandTemplate class Command(CommandTemplate): triggers = ['say', 'do'] helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')" adminOnly = True showInCommandList = False def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength): if msgPartsLength < 3: bot.say(target, "Please provide both a channel or user name to say something to, and the text to say") #Check if we're in the channel we have to say something to elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList: bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.") #Nothing's stopping us now! Say it! else: messageToSay = " ".join(msgParts[2:]) if triggerInMsg == 'say': bot.say(msgParts[1], messageToSay) elif triggerInMsg == 'do': bot.doAction(msgParts[1], messageToSay)
from CommandTemplate import CommandTemplate class Command(CommandTemplate): triggers = ['say', 'do', 'notice'] helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')" adminOnly = True showInCommandList = False def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength): if msgPartsLength < 3: bot.say(target, "Please provide both a channel or user name to say something to, and the text to say") #Check if we're in the channel we have to say something to elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList: bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.") #Nothing's stopping us now! Say it! else: messageToSay = " ".join(msgParts[2:]) messageType = 'say' if triggerInMsg == 'do': messageType = 'action' elif triggerInMsg == 'notice': messageType = 'notice' bot.sendMessage(msgParts[1], messageToSay, messageType)
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
Move to sendMessage command for message sending. Also add 'notice' trigger, because why not
Python
mit
Didero/DideRobot
python
## Code Before: from CommandTemplate import CommandTemplate class Command(CommandTemplate): triggers = ['say', 'do'] helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')" adminOnly = True showInCommandList = False def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength): if msgPartsLength < 3: bot.say(target, "Please provide both a channel or user name to say something to, and the text to say") #Check if we're in the channel we have to say something to elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList: bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.") #Nothing's stopping us now! Say it! else: messageToSay = " ".join(msgParts[2:]) if triggerInMsg == 'say': bot.say(msgParts[1], messageToSay) elif triggerInMsg == 'do': bot.doAction(msgParts[1], messageToSay) ## Instruction: Move to sendMessage command for message sending. Also add 'notice' trigger, because why not ## Code After: from CommandTemplate import CommandTemplate class Command(CommandTemplate): triggers = ['say', 'do', 'notice'] helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')" adminOnly = True showInCommandList = False def execute(self, bot, user, target, triggerInMsg, msg, msgWithoutFirstWord, msgParts, msgPartsLength): if msgPartsLength < 3: bot.say(target, "Please provide both a channel or user name to say something to, and the text to say") #Check if we're in the channel we have to say something to elif msgParts[1].startswith('#') and msgParts[1] not in bot.channelsUserList: bot.say(target, "I'm not in that channel, so I can't say anything in there, sorry.") #Nothing's stopping us now! Say it! else: messageToSay = " ".join(msgParts[2:]) messageType = 'say' if triggerInMsg == 'do': messageType = 'action' elif triggerInMsg == 'notice': messageType = 'notice' bot.sendMessage(msgParts[1], messageToSay, messageType)
... from CommandTemplate import CommandTemplate class Command(CommandTemplate): triggers = ['say', 'do', 'notice'] helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')" adminOnly = True showInCommandList = False ... #Nothing's stopping us now! Say it! else: messageToSay = " ".join(msgParts[2:]) messageType = 'say' if triggerInMsg == 'do': messageType = 'action' elif triggerInMsg == 'notice': messageType = 'notice' bot.sendMessage(msgParts[1], messageToSay, messageType) ...
3ae713ff276dc79e1d5fbb2e5ad9ea5a2e4f2596
src/main/java/pro/beam/api/BeamAPI.java
src/main/java/pro/beam/api/BeamAPI.java
package pro.beam.api; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import pro.beam.api.http.BeamHttpClient; import pro.beam.api.services.AbstractBeamService; import pro.beam.api.services.ServiceManager; import pro.beam.api.services.impl.*; import pro.beam.api.services.impl.UsersService; import java.net.URI; import java.util.concurrent.Executors; public class BeamAPI { public static final URI BASE_PATH = URI.create("https://beam.pro/api/v1/"); public final Gson gson; public final BeamHttpClient http; public final ListeningExecutorService executor; protected final ServiceManager services; public BeamAPI() { this.gson = new GsonBuilder().create(); this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)); this.http = new BeamHttpClient(this); this.services = new ServiceManager(); this.register(new UsersService(this)); this.register(new ChatService(this)); this.register(new ChannelsService(this)); } public <T extends AbstractBeamService> T use(Class<T> service) { return this.services.get(service); } public boolean register(AbstractBeamService service) { return this.services.register(service); } }
package pro.beam.api; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import pro.beam.api.http.BeamHttpClient; import pro.beam.api.services.AbstractBeamService; import pro.beam.api.services.ServiceManager; import pro.beam.api.services.impl.*; import pro.beam.api.services.impl.UsersService; import java.net.URI; import java.util.concurrent.Executors; public class BeamAPI { public static final URI BASE_PATH = URI.create("https://beam.pro/api/v1/"); public final Gson gson; public final BeamHttpClient http; public final ListeningExecutorService executor; protected final ServiceManager services; public BeamAPI() { this.gson = new GsonBuilder() .setDateFormat("yyyy-MM-dd HH:mm:ss") .create(); this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)); this.http = new BeamHttpClient(this); this.services = new ServiceManager(); this.register(new UsersService(this)); this.register(new ChatService(this)); this.register(new ChannelsService(this)); } public <T extends AbstractBeamService> T use(Class<T> service) { return this.services.get(service); } public boolean register(AbstractBeamService service) { return this.services.register(service); } }
Update API's GSON date format to match the API spec
Update API's GSON date format to match the API spec
Java
mit
WatchBeam/beam-client-java
java
## Code Before: package pro.beam.api; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import pro.beam.api.http.BeamHttpClient; import pro.beam.api.services.AbstractBeamService; import pro.beam.api.services.ServiceManager; import pro.beam.api.services.impl.*; import pro.beam.api.services.impl.UsersService; import java.net.URI; import java.util.concurrent.Executors; public class BeamAPI { public static final URI BASE_PATH = URI.create("https://beam.pro/api/v1/"); public final Gson gson; public final BeamHttpClient http; public final ListeningExecutorService executor; protected final ServiceManager services; public BeamAPI() { this.gson = new GsonBuilder().create(); this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)); this.http = new BeamHttpClient(this); this.services = new ServiceManager(); this.register(new UsersService(this)); this.register(new ChatService(this)); this.register(new ChannelsService(this)); } public <T extends AbstractBeamService> T use(Class<T> service) { return this.services.get(service); } public boolean register(AbstractBeamService service) { return this.services.register(service); } } ## Instruction: Update API's GSON date format to match the API spec ## Code After: package pro.beam.api; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import pro.beam.api.http.BeamHttpClient; import pro.beam.api.services.AbstractBeamService; import pro.beam.api.services.ServiceManager; import pro.beam.api.services.impl.*; import pro.beam.api.services.impl.UsersService; import java.net.URI; import java.util.concurrent.Executors; public class BeamAPI { public static final URI BASE_PATH = URI.create("https://beam.pro/api/v1/"); public final Gson gson; public final BeamHttpClient http; public final ListeningExecutorService executor; protected final ServiceManager services; public BeamAPI() { this.gson = new GsonBuilder() .setDateFormat("yyyy-MM-dd HH:mm:ss") .create(); this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)); this.http = new BeamHttpClient(this); this.services = new ServiceManager(); this.register(new UsersService(this)); this.register(new ChatService(this)); this.register(new ChannelsService(this)); } public <T extends AbstractBeamService> T use(Class<T> service) { return this.services.get(service); } public boolean register(AbstractBeamService service) { return this.services.register(service); } }
# ... existing code ... protected final ServiceManager services; public BeamAPI() { this.gson = new GsonBuilder() .setDateFormat("yyyy-MM-dd HH:mm:ss") .create(); this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)); this.http = new BeamHttpClient(this); # ... rest of the code ...
1d2237655ef0ba225e6fa0b8d0959ed6b3e75726
runtests.py
runtests.py
# Third party imports import pytest def main(): """ Run pytest tests. """ errno = pytest.main(['-x', 'spyder_terminal', '-v', '-rw', '--durations=10', '--cov=spyder_terminal', '--cov-report=term-missing']) # sys.exit doesn't work here because some things could be running # in the background (e.g. closing the main window) when this point # is reached. And if that's the case, sys.exit does't stop the # script (as you would expected). if errno != 0: raise SystemExit(errno) if __name__ == '__main__': main()
# Third party imports import pytest def main(): """ Run pytest tests. """ errno = pytest.main(['-x', 'spyder_terminal', '-v', '-rw', '--durations=10', '--cov=spyder_terminal', '--cov-report=term-missing', '--timeout=20']) # sys.exit doesn't work here because some things could be running # in the background (e.g. closing the main window) when this point # is reached. And if that's the case, sys.exit does't stop the # script (as you would expected). if errno != 0: raise SystemExit(errno) if __name__ == '__main__': main()
Add timeout to all tests
Add timeout to all tests
Python
mit
spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
python
## Code Before: # Third party imports import pytest def main(): """ Run pytest tests. """ errno = pytest.main(['-x', 'spyder_terminal', '-v', '-rw', '--durations=10', '--cov=spyder_terminal', '--cov-report=term-missing']) # sys.exit doesn't work here because some things could be running # in the background (e.g. closing the main window) when this point # is reached. And if that's the case, sys.exit does't stop the # script (as you would expected). if errno != 0: raise SystemExit(errno) if __name__ == '__main__': main() ## Instruction: Add timeout to all tests ## Code After: # Third party imports import pytest def main(): """ Run pytest tests. """ errno = pytest.main(['-x', 'spyder_terminal', '-v', '-rw', '--durations=10', '--cov=spyder_terminal', '--cov-report=term-missing', '--timeout=20']) # sys.exit doesn't work here because some things could be running # in the background (e.g. closing the main window) when this point # is reached. And if that's the case, sys.exit does't stop the # script (as you would expected). if errno != 0: raise SystemExit(errno) if __name__ == '__main__': main()
// ... existing code ... """ errno = pytest.main(['-x', 'spyder_terminal', '-v', '-rw', '--durations=10', '--cov=spyder_terminal', '--cov-report=term-missing', '--timeout=20']) # sys.exit doesn't work here because some things could be running # in the background (e.g. closing the main window) when this point // ... rest of the code ...
fbd6d2a7eb11bcad1638cd396a73e98f1717255f
junit-jupiter-migration-support/src/main/java/org/junit/jupiter/migrationsupport/rules/member/RuleAnnotatedField.java
junit-jupiter-migration-support/src/main/java/org/junit/jupiter/migrationsupport/rules/member/RuleAnnotatedField.java
/* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.jupiter.migrationsupport.rules.member; import static org.junit.platform.commons.meta.API.Usage.Internal; import java.lang.reflect.Field; import java.util.logging.Logger; import org.junit.platform.commons.meta.API; import org.junit.rules.TestRule; @API(Internal) public class RuleAnnotatedField extends AbstractRuleAnnotatedMember { private static final Logger LOG = Logger.getLogger(RuleAnnotatedField.class.getName()); public RuleAnnotatedField(Object testInstance, Field testRuleField) { try { testRuleField.setAccessible(true); this.testRuleInstance = (TestRule) testRuleField.get(testInstance); } catch (IllegalAccessException exception) { LOG.warning(exception.getMessage()); } } }
/* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.jupiter.migrationsupport.rules.member; import static org.junit.platform.commons.meta.API.Usage.Internal; import java.lang.reflect.Field; import java.util.logging.Logger; import org.junit.platform.commons.meta.API; import org.junit.platform.commons.util.ExceptionUtils; import org.junit.rules.TestRule; @API(Internal) public class RuleAnnotatedField extends AbstractRuleAnnotatedMember { private static final Logger LOG = Logger.getLogger(RuleAnnotatedField.class.getName()); public RuleAnnotatedField(Object testInstance, Field testRuleField) { try { testRuleField.setAccessible(true); this.testRuleInstance = (TestRule) testRuleField.get(testInstance); } catch (IllegalAccessException exception) { throw ExceptionUtils.throwAsUncheckedException(exception); } } }
Throw IllegalAccessException like in RuleAnnotatedMethod
Throw IllegalAccessException like in RuleAnnotatedMethod
Java
epl-1.0
junit-team/junit-lambda,sbrannen/junit-lambda
java
## Code Before: /* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.jupiter.migrationsupport.rules.member; import static org.junit.platform.commons.meta.API.Usage.Internal; import java.lang.reflect.Field; import java.util.logging.Logger; import org.junit.platform.commons.meta.API; import org.junit.rules.TestRule; @API(Internal) public class RuleAnnotatedField extends AbstractRuleAnnotatedMember { private static final Logger LOG = Logger.getLogger(RuleAnnotatedField.class.getName()); public RuleAnnotatedField(Object testInstance, Field testRuleField) { try { testRuleField.setAccessible(true); this.testRuleInstance = (TestRule) testRuleField.get(testInstance); } catch (IllegalAccessException exception) { LOG.warning(exception.getMessage()); } } } ## Instruction: Throw IllegalAccessException like in RuleAnnotatedMethod ## Code After: /* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.jupiter.migrationsupport.rules.member; import static org.junit.platform.commons.meta.API.Usage.Internal; import java.lang.reflect.Field; import java.util.logging.Logger; import org.junit.platform.commons.meta.API; import org.junit.platform.commons.util.ExceptionUtils; import org.junit.rules.TestRule; @API(Internal) public class RuleAnnotatedField extends AbstractRuleAnnotatedMember { private static final Logger LOG = Logger.getLogger(RuleAnnotatedField.class.getName()); public RuleAnnotatedField(Object testInstance, Field testRuleField) { try { testRuleField.setAccessible(true); this.testRuleInstance = (TestRule) testRuleField.get(testInstance); } catch (IllegalAccessException exception) { throw ExceptionUtils.throwAsUncheckedException(exception); } } }
// ... existing code ... import java.util.logging.Logger; import org.junit.platform.commons.meta.API; import org.junit.platform.commons.util.ExceptionUtils; import org.junit.rules.TestRule; @API(Internal) // ... modified code ... this.testRuleInstance = (TestRule) testRuleField.get(testInstance); } catch (IllegalAccessException exception) { throw ExceptionUtils.throwAsUncheckedException(exception); } } // ... rest of the code ...
517f53dc91164f4249de9dbaf31be65df02ffde7
numpy/fft/setup.py
numpy/fft/setup.py
def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('fft', parent_package, top_path) config.add_data_dir('tests') # Configure pocketfft_internal config.add_extension('_pocketfft_internal', sources=['_pocketfft.c'] ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
import sys def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('fft', parent_package, top_path) config.add_data_dir('tests') # AIX needs to be told to use large file support - at all times defs = [('_LARGE_FILES', None)] if sys.platform[:3] == "aix" else [] # Configure pocketfft_internal config.add_extension('_pocketfft_internal', sources=['_pocketfft.c'], define_macros=defs, ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
Add _LARGE_FILES to def_macros[] when platform is AIX (gh-15938)
BUG: Add _LARGE_FILES to def_macros[] when platform is AIX (gh-15938) AIX needs to be told to use large file support at all times. Fixes parts of gh-15801.
Python
bsd-3-clause
mhvk/numpy,pbrod/numpy,mattip/numpy,anntzer/numpy,numpy/numpy,numpy/numpy,pbrod/numpy,rgommers/numpy,numpy/numpy,endolith/numpy,abalkin/numpy,pdebuyl/numpy,grlee77/numpy,mattip/numpy,grlee77/numpy,pdebuyl/numpy,charris/numpy,charris/numpy,seberg/numpy,pbrod/numpy,grlee77/numpy,anntzer/numpy,jakirkham/numpy,seberg/numpy,rgommers/numpy,simongibbons/numpy,pdebuyl/numpy,madphysicist/numpy,endolith/numpy,abalkin/numpy,anntzer/numpy,mhvk/numpy,mhvk/numpy,jakirkham/numpy,simongibbons/numpy,mattip/numpy,simongibbons/numpy,grlee77/numpy,numpy/numpy,madphysicist/numpy,seberg/numpy,pbrod/numpy,charris/numpy,charris/numpy,endolith/numpy,madphysicist/numpy,mhvk/numpy,endolith/numpy,jakirkham/numpy,simongibbons/numpy,abalkin/numpy,madphysicist/numpy,mhvk/numpy,simongibbons/numpy,rgommers/numpy,seberg/numpy,mattip/numpy,grlee77/numpy,anntzer/numpy,jakirkham/numpy,madphysicist/numpy,pbrod/numpy,pdebuyl/numpy,rgommers/numpy,jakirkham/numpy
python
## Code Before: def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('fft', parent_package, top_path) config.add_data_dir('tests') # Configure pocketfft_internal config.add_extension('_pocketfft_internal', sources=['_pocketfft.c'] ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration) ## Instruction: BUG: Add _LARGE_FILES to def_macros[] when platform is AIX (gh-15938) AIX needs to be told to use large file support at all times. Fixes parts of gh-15801. ## Code After: import sys def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('fft', parent_package, top_path) config.add_data_dir('tests') # AIX needs to be told to use large file support - at all times defs = [('_LARGE_FILES', None)] if sys.platform[:3] == "aix" else [] # Configure pocketfft_internal config.add_extension('_pocketfft_internal', sources=['_pocketfft.c'], define_macros=defs, ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
... import sys def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration ... config.add_data_dir('tests') # AIX needs to be told to use large file support - at all times defs = [('_LARGE_FILES', None)] if sys.platform[:3] == "aix" else [] # Configure pocketfft_internal config.add_extension('_pocketfft_internal', sources=['_pocketfft.c'], define_macros=defs, ) return config ...
5aa3dbf8f520f9ffaaed51ed397eb9f4c722882a
sample_app/__init__.py
sample_app/__init__.py
import json import ConfigParser import falcon class _SimpleResource(object): def __init__(self, conf): try: message = conf.get('sample_app', 'message') except ConfigParser.Error: message = 'something' self._message = message def on_get(self, req, resp): resp.body = json.dumps({'message': self._message}) resp.set_header('Content-Type', 'application/json') def on_put(self, req, resp): doc = json.load(req.stream) self._message = doc['message'] resp.body = json.dumps({'message': self._message}) def make_application(): conf = ConfigParser.RawConfigParser() conf.read(['/etc/sample_app/sample_app.conf']) application = falcon.API() application.add_route('/', _SimpleResource(conf)) return application
import ConfigParser import json import falcon class _SimpleResource(object): def __init__(self, conf): try: message = conf.get('sample_app', 'message') except ConfigParser.Error: message = 'something' self._message = message def on_get(self, req, resp): resp.body = json.dumps({'message': self._message}) resp.set_header('Content-Type', 'application/json') def on_put(self, req, resp): doc = json.load(req.stream) self._message = doc['message'] resp.body = json.dumps({'message': self._message}) def make_application(): conf = ConfigParser.RawConfigParser() conf.read(['/etc/sample_app/sample_app.conf']) application = falcon.API() application.add_route('/', _SimpleResource(conf)) return application
Fix pep8 order import issue
Fix pep8 order import issue
Python
apache-2.0
brantlk/python-sample-app
python
## Code Before: import json import ConfigParser import falcon class _SimpleResource(object): def __init__(self, conf): try: message = conf.get('sample_app', 'message') except ConfigParser.Error: message = 'something' self._message = message def on_get(self, req, resp): resp.body = json.dumps({'message': self._message}) resp.set_header('Content-Type', 'application/json') def on_put(self, req, resp): doc = json.load(req.stream) self._message = doc['message'] resp.body = json.dumps({'message': self._message}) def make_application(): conf = ConfigParser.RawConfigParser() conf.read(['/etc/sample_app/sample_app.conf']) application = falcon.API() application.add_route('/', _SimpleResource(conf)) return application ## Instruction: Fix pep8 order import issue ## Code After: import ConfigParser import json import falcon class _SimpleResource(object): def __init__(self, conf): try: message = conf.get('sample_app', 'message') except ConfigParser.Error: message = 'something' self._message = message def on_get(self, req, resp): resp.body = json.dumps({'message': self._message}) resp.set_header('Content-Type', 'application/json') def on_put(self, req, resp): doc = json.load(req.stream) self._message = doc['message'] resp.body = json.dumps({'message': self._message}) def make_application(): conf = ConfigParser.RawConfigParser() conf.read(['/etc/sample_app/sample_app.conf']) application = falcon.API() application.add_route('/', _SimpleResource(conf)) return application
# ... existing code ... import ConfigParser import json import falcon # ... rest of the code ...
ab5edd504789e8fad3dcf0f30b0fbec8608e2abe
django_nyt/urls.py
django_nyt/urls.py
from __future__ import absolute_import from __future__ import unicode_literals from django.conf.urls import patterns, url urlpatterns = patterns( '', url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'), url( '^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'), url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url( '^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url( '^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'), url('^goto/$', 'django_nyt.views.goto', name='goto_base'),) def get_pattern(app_name="nyt", namespace="nyt"): """Every url resolution takes place as "nyt:view_name". https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces """ return urlpatterns, app_name, namespace
from __future__ import absolute_import from __future__ import unicode_literals from django import VERSION as DJANGO_VERSION from django.conf.urls import url urlpatterns = [ url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'), url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'), url('^goto/$', 'django_nyt.views.goto', name='goto_base'), ] if DJANGO_VERSION < (1, 8): from django.conf.urls import patterns urlpatterns = patterns('', *urlpatterns) def get_pattern(app_name="nyt", namespace="nyt"): """Every url resolution takes place as "nyt:view_name". https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces """ return urlpatterns, app_name, namespace
Use list instead of patterns()
Use list instead of patterns()
Python
apache-2.0
benjaoming/django-nyt,benjaoming/django-nyt
python
## Code Before: from __future__ import absolute_import from __future__ import unicode_literals from django.conf.urls import patterns, url urlpatterns = patterns( '', url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'), url( '^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'), url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url( '^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url( '^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'), url('^goto/$', 'django_nyt.views.goto', name='goto_base'),) def get_pattern(app_name="nyt", namespace="nyt"): """Every url resolution takes place as "nyt:view_name". https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces """ return urlpatterns, app_name, namespace ## Instruction: Use list instead of patterns() ## Code After: from __future__ import absolute_import from __future__ import unicode_literals from django import VERSION as DJANGO_VERSION from django.conf.urls import url urlpatterns = [ url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'), url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'), url('^goto/$', 'django_nyt.views.goto', name='goto_base'), ] if DJANGO_VERSION < (1, 8): from django.conf.urls import patterns urlpatterns = patterns('', *urlpatterns) def get_pattern(app_name="nyt", namespace="nyt"): """Every url resolution takes place as "nyt:view_name". https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces """ return urlpatterns, app_name, namespace
... from __future__ import absolute_import from __future__ import unicode_literals from django import VERSION as DJANGO_VERSION from django.conf.urls import url urlpatterns = [ url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'), url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'), url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'), url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'), url('^goto/$', 'django_nyt.views.goto', name='goto_base'), ] if DJANGO_VERSION < (1, 8): from django.conf.urls import patterns urlpatterns = patterns('', *urlpatterns) def get_pattern(app_name="nyt", namespace="nyt"): ...
e30d433153d9ad2f1d931f7f48b0ebbe9ba6763c
modules/new_module/new_module.py
modules/new_module/new_module.py
from models import custom_modules from . import handlers def register_module(): """Registers this module in the registry.""" global_urls = [ ('/new-global-url', handlers.NewURLHandler) # Global URLs go on mycourse.appspot.com/url ] course_urls = [ ('/new-course-url', handlers.NewURLHandler) ] # Course URLs go on mycourse.appspot.com/course-name/url global custom_module custom_module = custom_modules.Module( 'New module title (has to be unique)', 'Implements some functionality', global_urls, course_urls) return custom_module
import logging from models import custom_modules from . import handlers def register_module(): """Registers this module in the registry.""" def on_module_enabled(): logging.info('Module new_module.py was just enabled') def on_module_disabled(): logging.info('Module new_module.py was just dissabled') global_urls = [ ('/new-global-url', handlers.NewURLHandler) # Global URLs go on mycourse.appspot.com/url ] course_urls = [ ('/new-course-url', handlers.NewURLHandler) ] # Course URLs go on mycourse.appspot.com/course-name/url global custom_module custom_module = custom_modules.Module( 'New module title (has to be unique)', 'Implements some functionality', global_urls, course_urls, notify_module_disabled=on_module_disabled, notify_module_enabled=on_module_enabled) return custom_module
Add enable and dissable hooks
Add enable and dissable hooks
Python
apache-2.0
UniMOOC/gcb-new-module,UniMOOC/gcb-new-module,UniMOOC/gcb-new-module,UniMOOC/gcb-new-module
python
## Code Before: from models import custom_modules from . import handlers def register_module(): """Registers this module in the registry.""" global_urls = [ ('/new-global-url', handlers.NewURLHandler) # Global URLs go on mycourse.appspot.com/url ] course_urls = [ ('/new-course-url', handlers.NewURLHandler) ] # Course URLs go on mycourse.appspot.com/course-name/url global custom_module custom_module = custom_modules.Module( 'New module title (has to be unique)', 'Implements some functionality', global_urls, course_urls) return custom_module ## Instruction: Add enable and dissable hooks ## Code After: import logging from models import custom_modules from . import handlers def register_module(): """Registers this module in the registry.""" def on_module_enabled(): logging.info('Module new_module.py was just enabled') def on_module_disabled(): logging.info('Module new_module.py was just dissabled') global_urls = [ ('/new-global-url', handlers.NewURLHandler) # Global URLs go on mycourse.appspot.com/url ] course_urls = [ ('/new-course-url', handlers.NewURLHandler) ] # Course URLs go on mycourse.appspot.com/course-name/url global custom_module custom_module = custom_modules.Module( 'New module title (has to be unique)', 'Implements some functionality', global_urls, course_urls, notify_module_disabled=on_module_disabled, notify_module_enabled=on_module_enabled) return custom_module
// ... existing code ... import logging from models import custom_modules // ... modified code ... def register_module(): """Registers this module in the registry.""" def on_module_enabled(): logging.info('Module new_module.py was just enabled') def on_module_disabled(): logging.info('Module new_module.py was just dissabled') global_urls = [ ('/new-global-url', handlers.NewURLHandler) # Global URLs go on mycourse.appspot.com/url ... custom_module = custom_modules.Module( 'New module title (has to be unique)', 'Implements some functionality', global_urls, course_urls, notify_module_disabled=on_module_disabled, notify_module_enabled=on_module_enabled) return custom_module // ... rest of the code ...
b30bf6f1b862ae8386bad33a608785024f5ed69a
src/main/kotlin/org/team2471/frc/lib/util/MiscUtils.kt
src/main/kotlin/org/team2471/frc/lib/util/MiscUtils.kt
package org.team2471.frc.lib.util import edu.wpi.first.wpilibj.Timer /** * Executes the given block and returns elapsed time in seconds. */ fun measureTimeFPGA(body: () -> Unit): Double { val start = Timer.getFPGATimestamp() body() return Timer.getFPGATimestamp() - start } fun Double.deadband(tolerance: Double) = if(Math.abs(this) < tolerance) { 0.0 } else { this * (1+tolerance) - tolerance // scale }
package org.team2471.frc.lib.util import edu.wpi.first.wpilibj.Timer /** * Executes the given block and returns elapsed time in seconds. */ fun measureTimeFPGA(body: () -> Unit): Double { val start = Timer.getFPGATimestamp() body() return Timer.getFPGATimestamp() - start } fun Double.deadband(tolerance: Double) = if(Math.abs(this) < tolerance) { 0.0 } else { this }
Update meanlib to fix deadband scaling
Update meanlib to fix deadband scaling
Kotlin
unlicense
TeamMeanMachine/meanlib,TeamMeanMachine/meanlib
kotlin
## Code Before: package org.team2471.frc.lib.util import edu.wpi.first.wpilibj.Timer /** * Executes the given block and returns elapsed time in seconds. */ fun measureTimeFPGA(body: () -> Unit): Double { val start = Timer.getFPGATimestamp() body() return Timer.getFPGATimestamp() - start } fun Double.deadband(tolerance: Double) = if(Math.abs(this) < tolerance) { 0.0 } else { this * (1+tolerance) - tolerance // scale } ## Instruction: Update meanlib to fix deadband scaling ## Code After: package org.team2471.frc.lib.util import edu.wpi.first.wpilibj.Timer /** * Executes the given block and returns elapsed time in seconds. */ fun measureTimeFPGA(body: () -> Unit): Double { val start = Timer.getFPGATimestamp() body() return Timer.getFPGATimestamp() - start } fun Double.deadband(tolerance: Double) = if(Math.abs(this) < tolerance) { 0.0 } else { this }
# ... existing code ... fun Double.deadband(tolerance: Double) = if(Math.abs(this) < tolerance) { 0.0 } else { this } # ... rest of the code ...
e858be9072b175545e17631ccd838f9f7d8a7e21
tensorflow_datasets/dataset_collections/longt5/longt5.py
tensorflow_datasets/dataset_collections/longt5/longt5.py
"""Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) })
"""Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, homepage="https://github.com/google-research/longt5", ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) })
Add homepage to LongT5 dataset collection
Add homepage to LongT5 dataset collection PiperOrigin-RevId: 479013251
Python
apache-2.0
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
python
## Code Before: """Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) }) ## Instruction: Add homepage to LongT5 dataset collection PiperOrigin-RevId: 479013251 ## Code After: """Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, homepage="https://github.com/google-research/longt5", ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) })
# ... existing code ... release_notes={ "1.0.0": "Initial release", }, homepage="https://github.com/google-research/longt5", ) @property # ... rest of the code ...
28c98e6d6855aa203389483844df8222dc56fafd
src/main/java/me/nallar/javatransformer/internal/util/CollectionUtil.java
src/main/java/me/nallar/javatransformer/internal/util/CollectionUtil.java
package me.nallar.javatransformer.internal.util; import lombok.experimental.UtilityClass; import java.util.*; import java.util.stream.*; @UtilityClass public class CollectionUtil { @SafeVarargs @SuppressWarnings("varargs") public static <T> Stream<T> union(Collection<T>... collections) { return union(Arrays.asList(collections)); } public static <T> Stream<T> union(Collection<Collection<T>> collections) { return collections.stream().flatMap(Collection::stream); } }
package me.nallar.javatransformer.internal.util; import lombok.experimental.UtilityClass; import java.util.*; import java.util.stream.*; @UtilityClass public class CollectionUtil { @SafeVarargs @SuppressWarnings("varargs") public static <T> Stream<T> union(Collection<T>... collections) { return union(Arrays.asList(collections)); } public static <T> Stream<T> union(Collection<Collection<T>> collections) { return collections.stream().flatMap(x -> x == null ? Stream.empty() : x.stream()); } }
Handle null collections as empty in union(Collection)
Handle null collections as empty in union(Collection)
Java
mit
nallar/JavaTransformer
java
## Code Before: package me.nallar.javatransformer.internal.util; import lombok.experimental.UtilityClass; import java.util.*; import java.util.stream.*; @UtilityClass public class CollectionUtil { @SafeVarargs @SuppressWarnings("varargs") public static <T> Stream<T> union(Collection<T>... collections) { return union(Arrays.asList(collections)); } public static <T> Stream<T> union(Collection<Collection<T>> collections) { return collections.stream().flatMap(Collection::stream); } } ## Instruction: Handle null collections as empty in union(Collection) ## Code After: package me.nallar.javatransformer.internal.util; import lombok.experimental.UtilityClass; import java.util.*; import java.util.stream.*; @UtilityClass public class CollectionUtil { @SafeVarargs @SuppressWarnings("varargs") public static <T> Stream<T> union(Collection<T>... collections) { return union(Arrays.asList(collections)); } public static <T> Stream<T> union(Collection<Collection<T>> collections) { return collections.stream().flatMap(x -> x == null ? Stream.empty() : x.stream()); } }
// ... existing code ... } public static <T> Stream<T> union(Collection<Collection<T>> collections) { return collections.stream().flatMap(x -> x == null ? Stream.empty() : x.stream()); } } // ... rest of the code ...
f93727ecab71421bcfc4c2762c055d018dd7cd38
src/test/java/com/github/mlk/junit/rules/HttpDynamoDbRuleTest.java
src/test/java/com/github/mlk/junit/rules/HttpDynamoDbRuleTest.java
package com.github.mlk.junit.rules; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import com.github.mlk.junit.rules.helpers.dynamodb.DynamoExample; import java.util.UUID; import org.junit.Rule; import org.junit.Test; public class HttpDynamoDbRuleTest { @Rule public HttpDynamoDbRule subject = new HttpDynamoDbRule(); @Test public void getterSetterTest() { String randomValue = UUID.randomUUID().toString(); DynamoExample exampleClient = new DynamoExample(AmazonDynamoDBClientBuilder .standard() .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("anything", "anything"))) .withEndpointConfiguration(new EndpointConfiguration(subject.getEndpoint(), "eu-west-1")) .build()); exampleClient.createTable(); exampleClient.setValue(1L, randomValue); assertThat(exampleClient.getValue(1L), is(randomValue)); } }
package com.github.mlk.junit.rules; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import com.github.mlk.junit.rules.helpers.dynamodb.DynamoExample; import java.util.UUID; import org.junit.Rule; import org.junit.Test; public class HttpDynamoDbRuleTest { @Rule public HttpDynamoDbRule subject = new HttpDynamoDbRule(); @Test public void getterSetterTest() { String randomValue = UUID.randomUUID().toString(); DynamoExample exampleClient = new DynamoExample(AmazonDynamoDBClientBuilder .standard() .withEndpointConfiguration(new EndpointConfiguration(subject.getEndpoint(), "eu-west-1")) .build()); exampleClient.createTable(); exampleClient.setValue(1L, randomValue); assertThat(exampleClient.getValue(1L), is(randomValue)); } }
Support for setting fake AWS creds
Support for setting fake AWS creds
Java
bsd-3-clause
mlk/AssortmentOfJUnitRules,mlk/AssortmentOfJUnitRules
java
## Code Before: package com.github.mlk.junit.rules; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import com.github.mlk.junit.rules.helpers.dynamodb.DynamoExample; import java.util.UUID; import org.junit.Rule; import org.junit.Test; public class HttpDynamoDbRuleTest { @Rule public HttpDynamoDbRule subject = new HttpDynamoDbRule(); @Test public void getterSetterTest() { String randomValue = UUID.randomUUID().toString(); DynamoExample exampleClient = new DynamoExample(AmazonDynamoDBClientBuilder .standard() .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("anything", "anything"))) .withEndpointConfiguration(new EndpointConfiguration(subject.getEndpoint(), "eu-west-1")) .build()); exampleClient.createTable(); exampleClient.setValue(1L, randomValue); assertThat(exampleClient.getValue(1L), is(randomValue)); } } ## Instruction: Support for setting fake AWS creds ## Code After: package com.github.mlk.junit.rules; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import com.github.mlk.junit.rules.helpers.dynamodb.DynamoExample; import java.util.UUID; import org.junit.Rule; import org.junit.Test; public class HttpDynamoDbRuleTest { @Rule public HttpDynamoDbRule subject = new HttpDynamoDbRule(); @Test public void getterSetterTest() { String randomValue = UUID.randomUUID().toString(); DynamoExample exampleClient = new DynamoExample(AmazonDynamoDBClientBuilder .standard() .withEndpointConfiguration(new EndpointConfiguration(subject.getEndpoint(), "eu-west-1")) .build()); exampleClient.createTable(); exampleClient.setValue(1L, randomValue); assertThat(exampleClient.getValue(1L), is(randomValue)); } }
... import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import com.github.mlk.junit.rules.helpers.dynamodb.DynamoExample; ... DynamoExample exampleClient = new DynamoExample(AmazonDynamoDBClientBuilder .standard() .withEndpointConfiguration(new EndpointConfiguration(subject.getEndpoint(), "eu-west-1")) .build()); exampleClient.createTable(); ...
9ee00a148763c7caac1ae0d7dcb3efa496121ee7
lamana/__init__.py
lamana/__init__.py
__title__ = 'lamana' __version__ = '0.4.11-dev' __author__ = 'P. Robinson II' __license__ = 'BSD' __copyright__ = 'Copyright 2015, P. Robinson II' import lamana.input_ import lamana.distributions import lamana.constructs import lamana.theories import lamana.output_ #from lamana.models import * #import lamana.ratios #import lamana.predictions #import lamana.gamuts
__title__ = 'lamana' __version__ = '0.4.11.dev0' # PEP 440 style ##__version__ = '0.4.11-dev' __author__ = 'P. Robinson II' __license__ = 'BSD' __copyright__ = 'Copyright 2015, P. Robinson II' import lamana.input_ import lamana.distributions import lamana.constructs import lamana.theories import lamana.output_ #from lamana.models import * #import lamana.ratios #import lamana.predictions #import lamana.gamuts
Modify dev versioning; see PEP 440
Modify dev versioning; see PEP 440
Python
bsd-3-clause
par2/lamana
python
## Code Before: __title__ = 'lamana' __version__ = '0.4.11-dev' __author__ = 'P. Robinson II' __license__ = 'BSD' __copyright__ = 'Copyright 2015, P. Robinson II' import lamana.input_ import lamana.distributions import lamana.constructs import lamana.theories import lamana.output_ #from lamana.models import * #import lamana.ratios #import lamana.predictions #import lamana.gamuts ## Instruction: Modify dev versioning; see PEP 440 ## Code After: __title__ = 'lamana' __version__ = '0.4.11.dev0' # PEP 440 style ##__version__ = '0.4.11-dev' __author__ = 'P. Robinson II' __license__ = 'BSD' __copyright__ = 'Copyright 2015, P. Robinson II' import lamana.input_ import lamana.distributions import lamana.constructs import lamana.theories import lamana.output_ #from lamana.models import * #import lamana.ratios #import lamana.predictions #import lamana.gamuts
# ... existing code ... __title__ = 'lamana' __version__ = '0.4.11.dev0' # PEP 440 style ##__version__ = '0.4.11-dev' __author__ = 'P. Robinson II' __license__ = 'BSD' __copyright__ = 'Copyright 2015, P. Robinson II' # ... rest of the code ...