| column | type | length range / distinct values |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 234 |
| new_file | stringlengths | 4 to 234 |
| old_contents | stringlengths | 10 to 3.01k |
| new_contents | stringlengths | 19 to 3.38k |
| subject | stringlengths | 16 to 736 |
| message | stringlengths | 17 to 2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 to 82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134 to 4.41k |
| fuzzy_diff | stringlengths | 29 to 3.44k |
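Each record bundles the pre-edit file (`old_contents`), the commit `subject` and `message`, the post-edit file (`new_contents`), a `content` field that stitches these together as a "Code Before / Instruction / Code After" prompt, and a `fuzzy_diff` with elided-context markers. As a rough illustration only, the sketch below shows how such a record might be handled in Python; the dict-based record layout and the exact `content` template are assumptions inferred from the sample rows shown further down, not a documented specification, and the toy field values are invented.

```python
# Illustrative sketch only: compose a `content`-style prompt from one record.
# Assumption: records are plain Python dicts keyed by the column names above,
# and `content` follows the "## Code Before / ## Instruction / ## Code After"
# template visible in the sample rows.

def build_content(record: dict) -> str:
    """Compose a content-style string from a record's other columns."""
    return (
        "## Code Before:\n{old}\n"
        "## Instruction:\n{msg}\n"
        "## Code After:\n{new}"
    ).format(
        old=record["old_contents"],
        msg=record["message"],
        new=record["new_contents"],
    )


if __name__ == "__main__":
    # Toy record with invented, shortened values for brevity.
    toy_record = {
        "old_contents": "print('hello')",
        "message": "Use double quotes in the greeting",
        "new_contents": 'print("hello")',
    }
    print(build_content(toy_record))
```

Running the toy example prints a three-section prompt in the same shape as the `content` values in the rows below.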
593bab981f36f7af52ae55914c18e368e8c1a94f
examples/app-on-ws-init.py
examples/app-on-ws-init.py
from argparse import ArgumentParser import i3ipc i3 = i3ipc.Connection() parser = ArgumentParser(description='Open an application on a given workspace when it is initialized') parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace') parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace') args = parser.parse_args() def on_workspace(i3, e): if e.current.props.name == args.workspace and not len(e.current.leaves()): i3.command('exec {}'.format(args.command)) i3.on('workspace::focus', on_workspace) i3.main()
from argparse import ArgumentParser import i3ipc i3 = i3ipc.Connection() parser = ArgumentParser(description="""Open the given application each time the given workspace is created. For instance, running 'app-on-ws-init.py 6 i3-sensible-terminal' should open your terminal as soon as you create the workspace 6. """) parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace') parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace') args = parser.parse_args() def on_workspace(i3, e): if e.current.props.name == args.workspace and not len(e.current.leaves()): i3.command('exec {}'.format(args.command)) i3.on('workspace::focus', on_workspace) i3.main()
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
Python
bsd-3-clause
xenomachina/i3ipc-python,nicoe/i3ipc-python,acrisci/i3ipc-python,chrsclmn/i3ipc-python
python
## Code Before: from argparse import ArgumentParser import i3ipc i3 = i3ipc.Connection() parser = ArgumentParser(description='Open an application on a given workspace when it is initialized') parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace') parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace') args = parser.parse_args() def on_workspace(i3, e): if e.current.props.name == args.workspace and not len(e.current.leaves()): i3.command('exec {}'.format(args.command)) i3.on('workspace::focus', on_workspace) i3.main() ## Instruction: Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example. ## Code After: from argparse import ArgumentParser import i3ipc i3 = i3ipc.Connection() parser = ArgumentParser(description="""Open the given application each time the given workspace is created. For instance, running 'app-on-ws-init.py 6 i3-sensible-terminal' should open your terminal as soon as you create the workspace 6. """) parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace') parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace') args = parser.parse_args() def on_workspace(i3, e): if e.current.props.name == args.workspace and not len(e.current.leaves()): i3.command('exec {}'.format(args.command)) i3.on('workspace::focus', on_workspace) i3.main()
... i3 = i3ipc.Connection() parser = ArgumentParser(description="""Open the given application each time the given workspace is created. For instance, running 'app-on-ws-init.py 6 i3-sensible-terminal' should open your terminal as soon as you create the workspace 6. """) parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace') parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace') args = parser.parse_args() ...
c5872317e5eb1f5cc6e623089d9e89baacdd7f9e
src/Wift.java
src/Wift.java
public class Wift { /** * Wift - The BASIC Programming Language * * BASIC FUNCTIONALITY: * - STRINGS [] * - INTEGERS [] * - ARITHMETIC [] * - VARIABLES [] * * FUNCTIONS: * - PRINT [] * - INPUT [] * - IF [] * - FOR [] * * */ public static void main(){} // #readFile() -> char content // #tokenize() -> list // #parse() -> symbol table }
public class Wift { /** * Wift - The BASIC Programming Language * * BASIC FUNCTIONALITY: * - STRINGS [] * - INTEGERS [] * - ARITHMETIC [] * - VARIABLES [] * * FUNCTIONS: * - PRINT [] * - INPUT [] * - IF [] * - FOR [] * * */ public static void main(String...args){} // #readFile() -> char content // #tokenize() -> list // #parse() -> symbol table }
Add "String...args" into "public..." :maple_leaf:
Add "String...args" into "public..." :maple_leaf:
Java
bsd-2-clause
SuriyaaKudoIsc/wift,SuriyaaKudoIsc/wift
java
## Code Before: public class Wift { /** * Wift - The BASIC Programming Language * * BASIC FUNCTIONALITY: * - STRINGS [] * - INTEGERS [] * - ARITHMETIC [] * - VARIABLES [] * * FUNCTIONS: * - PRINT [] * - INPUT [] * - IF [] * - FOR [] * * */ public static void main(){} // #readFile() -> char content // #tokenize() -> list // #parse() -> symbol table } ## Instruction: Add "String...args" into "public..." :maple_leaf: ## Code After: public class Wift { /** * Wift - The BASIC Programming Language * * BASIC FUNCTIONALITY: * - STRINGS [] * - INTEGERS [] * - ARITHMETIC [] * - VARIABLES [] * * FUNCTIONS: * - PRINT [] * - INPUT [] * - IF [] * - FOR [] * * */ public static void main(String...args){} // #readFile() -> char content // #tokenize() -> list // #parse() -> symbol table }
// ... existing code ... * */ public static void main(String...args){} // #readFile() -> char content // #tokenize() -> list // ... rest of the code ...
1bb67543b4187ee05c616afb731c229aaa94fdd3
setup.py
setup.py
import os import sys from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() requirements = ['httplib2', 'argparse', 'prettytable'] if sys.version_info < (2, 6): requirements.append('simplejson') setup( name = "python-keystoneclient", version = "2.7", description = "Client library for OpenStack Keystone API", long_description = read('README.rst'), url = 'https://github.com/4P/python-keystoneclient', license = 'Apache', author = 'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss', author_email = '[email protected]', packages = find_packages(exclude=['tests', 'tests.*']), classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires = requirements, tests_require = ["nose", "mock", "mox"], test_suite = "nose.collector", entry_points = { 'console_scripts': ['keystone = keystoneclient.shell:main'] } )
import os import sys from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() requirements = ['httplib2', 'argparse', 'prettytable'] if sys.version_info < (2, 6): requirements.append('simplejson') setup( name = "python-keystoneclient", version = "2012.1", description = "Client library for OpenStack Keystone API", long_description = read('README.rst'), url = 'https://github.com/openstack/python-keystoneclient', license = 'Apache', author = 'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss', author_email = '[email protected]', packages = find_packages(exclude=['tests', 'tests.*']), classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires = requirements, tests_require = ["nose", "mock", "mox"], test_suite = "nose.collector", entry_points = { 'console_scripts': ['keystone = keystoneclient.shell:main'] } )
Adjust version number to match other deliveries
Adjust version number to match other deliveries Set version from 2.7 to 2012.1 to match the other OpenStack Keystone deliveries (python-keystoneclient will be released as part of Keystone 2012.1~e3). Also adjusted the location of the git repository to match new location. Fixes bug 917656. Change-Id: I4d8d071e3cdc5665e29a89067958f5f1e8964221
Python
apache-2.0
citrix-openstack-build/keystoneauth,jamielennox/keystoneauth,sileht/keystoneauth
python
## Code Before: import os import sys from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() requirements = ['httplib2', 'argparse', 'prettytable'] if sys.version_info < (2, 6): requirements.append('simplejson') setup( name = "python-keystoneclient", version = "2.7", description = "Client library for OpenStack Keystone API", long_description = read('README.rst'), url = 'https://github.com/4P/python-keystoneclient', license = 'Apache', author = 'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss', author_email = '[email protected]', packages = find_packages(exclude=['tests', 'tests.*']), classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires = requirements, tests_require = ["nose", "mock", "mox"], test_suite = "nose.collector", entry_points = { 'console_scripts': ['keystone = keystoneclient.shell:main'] } ) ## Instruction: Adjust version number to match other deliveries Set version from 2.7 to 2012.1 to match the other OpenStack Keystone deliveries (python-keystoneclient will be released as part of Keystone 2012.1~e3). Also adjusted the location of the git repository to match new location. Fixes bug 917656. Change-Id: I4d8d071e3cdc5665e29a89067958f5f1e8964221 ## Code After: import os import sys from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() requirements = ['httplib2', 'argparse', 'prettytable'] if sys.version_info < (2, 6): requirements.append('simplejson') setup( name = "python-keystoneclient", version = "2012.1", description = "Client library for OpenStack Keystone API", long_description = read('README.rst'), url = 'https://github.com/openstack/python-keystoneclient', license = 'Apache', author = 'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss', author_email = '[email protected]', packages = find_packages(exclude=['tests', 'tests.*']), classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires = requirements, tests_require = ["nose", "mock", "mox"], test_suite = "nose.collector", entry_points = { 'console_scripts': ['keystone = keystoneclient.shell:main'] } )
# ... existing code ... setup( name = "python-keystoneclient", version = "2012.1", description = "Client library for OpenStack Keystone API", long_description = read('README.rst'), url = 'https://github.com/openstack/python-keystoneclient', license = 'Apache', author = 'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss', author_email = '[email protected]', # ... rest of the code ...
79771029736b935605a13df181f9780bd967ecb2
test2/typedef/init_and_func.c
test2/typedef/init_and_func.c
// RUN: %check -e %s typedef int f(void) // CHECK: error: typedef storage on function { return 3; } typedef char c = 3; // CHECK: error: initialised typedef main() { int *p = (__typeof(*p))0; // CHECK: !/warn/ for(int _Alignas(long) x = 0; x; x++); // CHECK: !/warn/ return f(); // CHECK: !/warn/ }
// RUN: %check -e %s typedef int f(void) // CHECK: error: typedef storage on function { return 3; } typedef char c = 3; // CHECK: error: initialised typedef main() { int *p = (__typeof(*p))0; // can't check here - we think p is used uninit for(int _Alignas(long) x = 0; x; x++); // CHECK: !/warn/ return f(); // CHECK: !/warn/ }
Remove extra warning in typedef/init/func test
Remove extra warning in typedef/init/func test
C
mit
8l/ucc-c-compiler,8l/ucc-c-compiler,8l/ucc-c-compiler,8l/ucc-c-compiler
c
## Code Before: // RUN: %check -e %s typedef int f(void) // CHECK: error: typedef storage on function { return 3; } typedef char c = 3; // CHECK: error: initialised typedef main() { int *p = (__typeof(*p))0; // CHECK: !/warn/ for(int _Alignas(long) x = 0; x; x++); // CHECK: !/warn/ return f(); // CHECK: !/warn/ } ## Instruction: Remove extra warning in typedef/init/func test ## Code After: // RUN: %check -e %s typedef int f(void) // CHECK: error: typedef storage on function { return 3; } typedef char c = 3; // CHECK: error: initialised typedef main() { int *p = (__typeof(*p))0; // can't check here - we think p is used uninit for(int _Alignas(long) x = 0; x; x++); // CHECK: !/warn/ return f(); // CHECK: !/warn/ }
# ... existing code ... main() { int *p = (__typeof(*p))0; // can't check here - we think p is used uninit for(int _Alignas(long) x = 0; x; x++); // CHECK: !/warn/ # ... rest of the code ...
a97391befb4b5fcd65fbd0fb3ce6dac6fe9ff378
src/main/java/org/synyx/urlaubsverwaltung/availability/api/TimedAbsence.java
src/main/java/org/synyx/urlaubsverwaltung/availability/api/TimedAbsence.java
package org.synyx.urlaubsverwaltung.availability.api; import org.synyx.urlaubsverwaltung.period.DayLength; import java.math.BigDecimal; /** * Details for a (partial) absence of a person on a day. */ class TimedAbsence { enum Type { VACATION, SICK_NOTE, WORK, FREETIME, PUBLIC_HOLIDAY } private final Type type; private final BigDecimal ratio; private final String partOfDay; public TimedAbsence(DayLength dayLength, Type type) { this.type = type; this.ratio = dayLength.getDuration(); this.partOfDay = dayLength.name(); } public Type getType() { return type; } public BigDecimal getRatio() { return ratio; } public String getPartOfDay() { return partOfDay; } }
package org.synyx.urlaubsverwaltung.availability.api; import org.synyx.urlaubsverwaltung.period.DayLength; import java.math.BigDecimal; /** * Details for a (partial) absence of a person on a day. */ class TimedAbsence { enum Type { VACATION, SICK_NOTE, FREETIME, PUBLIC_HOLIDAY } private final Type type; private final BigDecimal ratio; private final String partOfDay; public TimedAbsence(DayLength dayLength, Type type) { this.type = type; this.ratio = dayLength.getDuration(); this.partOfDay = dayLength.name(); } public Type getType() { return type; } public BigDecimal getRatio() { return ratio; } public String getPartOfDay() { return partOfDay; } }
Remove not used timed absence type 'WORK'
Remove not used timed absence type 'WORK'
Java
apache-2.0
synyx/urlaubsverwaltung,synyx/urlaubsverwaltung,synyx/urlaubsverwaltung,synyx/urlaubsverwaltung
java
## Code Before: package org.synyx.urlaubsverwaltung.availability.api; import org.synyx.urlaubsverwaltung.period.DayLength; import java.math.BigDecimal; /** * Details for a (partial) absence of a person on a day. */ class TimedAbsence { enum Type { VACATION, SICK_NOTE, WORK, FREETIME, PUBLIC_HOLIDAY } private final Type type; private final BigDecimal ratio; private final String partOfDay; public TimedAbsence(DayLength dayLength, Type type) { this.type = type; this.ratio = dayLength.getDuration(); this.partOfDay = dayLength.name(); } public Type getType() { return type; } public BigDecimal getRatio() { return ratio; } public String getPartOfDay() { return partOfDay; } } ## Instruction: Remove not used timed absence type 'WORK' ## Code After: package org.synyx.urlaubsverwaltung.availability.api; import org.synyx.urlaubsverwaltung.period.DayLength; import java.math.BigDecimal; /** * Details for a (partial) absence of a person on a day. */ class TimedAbsence { enum Type { VACATION, SICK_NOTE, FREETIME, PUBLIC_HOLIDAY } private final Type type; private final BigDecimal ratio; private final String partOfDay; public TimedAbsence(DayLength dayLength, Type type) { this.type = type; this.ratio = dayLength.getDuration(); this.partOfDay = dayLength.name(); } public Type getType() { return type; } public BigDecimal getRatio() { return ratio; } public String getPartOfDay() { return partOfDay; } }
// ... existing code ... VACATION, SICK_NOTE, FREETIME, PUBLIC_HOLIDAY } // ... rest of the code ...
a263493d251f466ce6fe6ad6d6199ac9564808b5
trunk/CVSROOT/org.mwc.cmap.core/src/org/mwc/cmap/core/DataTypes/TrackData/TrackDataProvider.java
trunk/CVSROOT/org.mwc.cmap.core/src/org/mwc/cmap/core/DataTypes/TrackData/TrackDataProvider.java
/** * */ package org.mwc.cmap.core.DataTypes.TrackData; import Debrief.Tools.Tote.WatchableList; /** * @author ian.mayo * */ public interface TrackDataProvider { public static interface TrackDataListener { /** find out that the primary has changed * * @param primary the primary track */ public void primaryUpdated(WatchableList primary); /** find out that the secondaries have changed * * @param secondaries list of secondary tracks */ public void secondariesUpdated(WatchableList[] secondaries); } /** declare that we want to be informed about changes * in selected tracks */ public void addTrackDataListener(TrackDataListener listener); /** find out what the primary track is * */ public WatchableList getPrimaryTrack(); /** find out what the secondary track is * */ public WatchableList[] getSecondaryTracks(); }
/** * */ package org.mwc.cmap.core.DataTypes.TrackData; import Debrief.Tools.Tote.WatchableList; /** * @author ian.mayo * */ public interface TrackDataProvider { public static interface TrackDataListener { /** find out that the primary has changed * * @param primary the primary track */ public void primaryUpdated(WatchableList primary); /** find out that the secondaries have changed * * @param secondaries list of secondary tracks */ public void secondariesUpdated(WatchableList[] secondaries); } /** declare that we want to be informed about changes * in selected tracks */ public void addTrackDataListener(TrackDataListener listener); /** forget that somebody wants to know about track changes * */ public void removeTrackDataListener(TrackDataListener listener); /** find out what the primary track is * */ public WatchableList getPrimaryTrack(); /** find out what the secondary track is * */ public WatchableList[] getSecondaryTracks(); }
Allow removal of track data
Allow removal of track data git-svn-id: d2601f1668e3cd2de409f5c059006a6eeada0abf@205 cb33b658-6c9e-41a7-9690-cba343611204
Java
epl-1.0
theanuradha/debrief,pecko/debrief,pecko/debrief,pecko/debrief,debrief/debrief,pecko/debrief,alastrina123/debrief,alastrina123/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,alastrina123/debrief,alastrina123/debrief,debrief/debrief,pecko/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief,debrief/debrief,debrief/debrief,alastrina123/debrief,alastrina123/debrief,alastrina123/debrief
java
## Code Before: /** * */ package org.mwc.cmap.core.DataTypes.TrackData; import Debrief.Tools.Tote.WatchableList; /** * @author ian.mayo * */ public interface TrackDataProvider { public static interface TrackDataListener { /** find out that the primary has changed * * @param primary the primary track */ public void primaryUpdated(WatchableList primary); /** find out that the secondaries have changed * * @param secondaries list of secondary tracks */ public void secondariesUpdated(WatchableList[] secondaries); } /** declare that we want to be informed about changes * in selected tracks */ public void addTrackDataListener(TrackDataListener listener); /** find out what the primary track is * */ public WatchableList getPrimaryTrack(); /** find out what the secondary track is * */ public WatchableList[] getSecondaryTracks(); } ## Instruction: Allow removal of track data git-svn-id: d2601f1668e3cd2de409f5c059006a6eeada0abf@205 cb33b658-6c9e-41a7-9690-cba343611204 ## Code After: /** * */ package org.mwc.cmap.core.DataTypes.TrackData; import Debrief.Tools.Tote.WatchableList; /** * @author ian.mayo * */ public interface TrackDataProvider { public static interface TrackDataListener { /** find out that the primary has changed * * @param primary the primary track */ public void primaryUpdated(WatchableList primary); /** find out that the secondaries have changed * * @param secondaries list of secondary tracks */ public void secondariesUpdated(WatchableList[] secondaries); } /** declare that we want to be informed about changes * in selected tracks */ public void addTrackDataListener(TrackDataListener listener); /** forget that somebody wants to know about track changes * */ public void removeTrackDataListener(TrackDataListener listener); /** find out what the primary track is * */ public WatchableList getPrimaryTrack(); /** find out what the secondary track is * */ public WatchableList[] getSecondaryTracks(); }
# ... existing code ... * in selected tracks */ public void addTrackDataListener(TrackDataListener listener); /** forget that somebody wants to know about track changes * */ public void removeTrackDataListener(TrackDataListener listener); /** find out what the primary track is * # ... rest of the code ...
c37cafb9c83e9f9bcc806cdb979f127fe924fa00
tools/get_binary.py
tools/get_binary.py
import os import sys import shutil from version import full_version from optparse import OptionParser import pkgutils def main(): usage = "usage: %prog [destination path]" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 1: parser.print_usage() sys.exit(1) dest = args[0] shutil.rmtree(dest, True) os.mkdir(dest) build_dir = pkgutils.package_builder_dir() binary_name = pkgutils.package_binary() binary = os.path.join(build_dir, binary_name) dest = os.path.join(dest, '%s-monitoring-agent-%s' % (pkgutils.pkg_dir(), full_version)) if pkgutils.pkg_type() == 'windows': dest += '.msi' print("Moving %s to %s" % (binary, dest)) shutil.move(binary, dest) if pkgutils.pkg_type() != 'windows': shutil.move(binary + ".sig", dest + ".sig") if __name__ == "__main__": main()
import os import sys import shutil from version import full_version from optparse import OptionParser import pkgutils def main(): usage = "usage: %prog [destination path]" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 1: parser.print_usage() sys.exit(1) dest = args[0] build_dir = pkgutils.package_builder_dir() binary_name = pkgutils.package_binary() binary = os.path.join(build_dir, binary_name) dest = os.path.join(dest, '%s-monitoring-agent-%s' % (pkgutils.pkg_dir(), full_version)) if pkgutils.pkg_type() == 'windows': dest += '.msi' print("Moving %s to %s" % (binary, dest)) shutil.move(binary, dest) if pkgutils.pkg_type() != 'windows': shutil.move(binary + ".sig", dest + ".sig") if __name__ == "__main__": main()
Revert "remove the dest tree and recreate it"
Revert "remove the dest tree and recreate it" This reverts commit becc4657acea505594836e62c49de2b4cb0160a9.
Python
apache-2.0
christopherjwang/rackspace-monitoring-agent,kaustavha/rackspace-monitoring-agent,christopherjwang/rackspace-monitoring-agent,virgo-agent-toolkit/rackspace-monitoring-agent,virgo-agent-toolkit/rackspace-monitoring-agent,kaustavha/rackspace-monitoring-agent,AlphaStaxLLC/rackspace-monitoring-agent,AlphaStaxLLC/rackspace-monitoring-agent
python
## Code Before: import os import sys import shutil from version import full_version from optparse import OptionParser import pkgutils def main(): usage = "usage: %prog [destination path]" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 1: parser.print_usage() sys.exit(1) dest = args[0] shutil.rmtree(dest, True) os.mkdir(dest) build_dir = pkgutils.package_builder_dir() binary_name = pkgutils.package_binary() binary = os.path.join(build_dir, binary_name) dest = os.path.join(dest, '%s-monitoring-agent-%s' % (pkgutils.pkg_dir(), full_version)) if pkgutils.pkg_type() == 'windows': dest += '.msi' print("Moving %s to %s" % (binary, dest)) shutil.move(binary, dest) if pkgutils.pkg_type() != 'windows': shutil.move(binary + ".sig", dest + ".sig") if __name__ == "__main__": main() ## Instruction: Revert "remove the dest tree and recreate it" This reverts commit becc4657acea505594836e62c49de2b4cb0160a9. ## Code After: import os import sys import shutil from version import full_version from optparse import OptionParser import pkgutils def main(): usage = "usage: %prog [destination path]" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 1: parser.print_usage() sys.exit(1) dest = args[0] build_dir = pkgutils.package_builder_dir() binary_name = pkgutils.package_binary() binary = os.path.join(build_dir, binary_name) dest = os.path.join(dest, '%s-monitoring-agent-%s' % (pkgutils.pkg_dir(), full_version)) if pkgutils.pkg_type() == 'windows': dest += '.msi' print("Moving %s to %s" % (binary, dest)) shutil.move(binary, dest) if pkgutils.pkg_type() != 'windows': shutil.move(binary + ".sig", dest + ".sig") if __name__ == "__main__": main()
... sys.exit(1) dest = args[0] build_dir = pkgutils.package_builder_dir() binary_name = pkgutils.package_binary() binary = os.path.join(build_dir, binary_name) ...
0c4d62df6aabb68d10e5d33ad2358eec98939958
src/main/java/voot/oauth/ExternalGroupsQuerier.java
src/main/java/voot/oauth/ExternalGroupsQuerier.java
package voot.oauth; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ForkJoinPool; import java.util.stream.Collectors; import com.google.common.base.Preconditions; import voot.oauth.valueobject.Group; public class ExternalGroupsQuerier { private final List<GroupClient> groupClients; public ExternalGroupsQuerier(List<GroupClient> groupClients) { Preconditions.checkArgument(groupClients.size() > 0, "No clients configured"); this.groupClients = groupClients; } public List<Group> getMyGroups(String uid, String schacHomeOrganization) { // we're I/O bound and groupClients.size() will be < 5 ForkJoinPool forkJoinPool = new ForkJoinPool(groupClients.size()); try { return forkJoinPool.submit(() -> this.groupClients.parallelStream() .filter(client -> client.isAuthorative(schacHomeOrganization)) .map(client -> client.getMemberships(uid, schacHomeOrganization)) .flatMap(Collection::stream) .collect(Collectors.toList())).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to schedule querying of external group providers.", e); } } }
package voot.oauth; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ForkJoinPool; import java.util.stream.Collectors; import com.google.common.base.Preconditions; import voot.oauth.valueobject.Group; public class ExternalGroupsQuerier { private final List<GroupClient> groupClients; private final ForkJoinPool forkJoinPool; public ExternalGroupsQuerier(List<GroupClient> groupClients) { Preconditions.checkArgument(groupClients.size() > 0, "No clients configured"); this.groupClients = groupClients; forkJoinPool = new ForkJoinPool(groupClients.size() * 20); // we're I/O bound. } public List<Group> getMyGroups(String uid, String schacHomeOrganization) { try { return forkJoinPool.submit(() -> this.groupClients.parallelStream() .filter(client -> client.isAuthorative(schacHomeOrganization)) .map(client -> client.getMemberships(uid, schacHomeOrganization)) .flatMap(Collection::stream) .collect(Collectors.toList())).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to schedule querying of external group providers.", e); } } }
Set the fork-join pool globally to prevent DOS-vulnerability
Set the fork-join pool globally to prevent DOS-vulnerability
Java
apache-2.0
OpenConext/OpenConext-voot
java
## Code Before: package voot.oauth; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ForkJoinPool; import java.util.stream.Collectors; import com.google.common.base.Preconditions; import voot.oauth.valueobject.Group; public class ExternalGroupsQuerier { private final List<GroupClient> groupClients; public ExternalGroupsQuerier(List<GroupClient> groupClients) { Preconditions.checkArgument(groupClients.size() > 0, "No clients configured"); this.groupClients = groupClients; } public List<Group> getMyGroups(String uid, String schacHomeOrganization) { // we're I/O bound and groupClients.size() will be < 5 ForkJoinPool forkJoinPool = new ForkJoinPool(groupClients.size()); try { return forkJoinPool.submit(() -> this.groupClients.parallelStream() .filter(client -> client.isAuthorative(schacHomeOrganization)) .map(client -> client.getMemberships(uid, schacHomeOrganization)) .flatMap(Collection::stream) .collect(Collectors.toList())).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to schedule querying of external group providers.", e); } } } ## Instruction: Set the fork-join pool globally to prevent DOS-vulnerability ## Code After: package voot.oauth; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ForkJoinPool; import java.util.stream.Collectors; import com.google.common.base.Preconditions; import voot.oauth.valueobject.Group; public class ExternalGroupsQuerier { private final List<GroupClient> groupClients; private final ForkJoinPool forkJoinPool; public ExternalGroupsQuerier(List<GroupClient> groupClients) { Preconditions.checkArgument(groupClients.size() > 0, "No clients configured"); this.groupClients = groupClients; forkJoinPool = new ForkJoinPool(groupClients.size() * 20); // we're I/O bound. } public List<Group> getMyGroups(String uid, String schacHomeOrganization) { try { return forkJoinPool.submit(() -> this.groupClients.parallelStream() .filter(client -> client.isAuthorative(schacHomeOrganization)) .map(client -> client.getMemberships(uid, schacHomeOrganization)) .flatMap(Collection::stream) .collect(Collectors.toList())).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException("Unable to schedule querying of external group providers.", e); } } }
# ... existing code ... public class ExternalGroupsQuerier { private final List<GroupClient> groupClients; private final ForkJoinPool forkJoinPool; public ExternalGroupsQuerier(List<GroupClient> groupClients) { Preconditions.checkArgument(groupClients.size() > 0, "No clients configured"); this.groupClients = groupClients; forkJoinPool = new ForkJoinPool(groupClients.size() * 20); // we're I/O bound. } public List<Group> getMyGroups(String uid, String schacHomeOrganization) { try { return forkJoinPool.submit(() -> this.groupClients.parallelStream() .filter(client -> client.isAuthorative(schacHomeOrganization)) # ... rest of the code ...
301f0849d46cf25d12dfca3678fb4c6cf4347dc7
alien4cloud-core/src/main/java/alien4cloud/model/templates/TopologyTemplate.java
alien4cloud-core/src/main/java/alien4cloud/model/templates/TopologyTemplate.java
package alien4cloud.model.templates; import lombok.Getter; import lombok.Setter; import org.elasticsearch.annotation.ESObject; import org.elasticsearch.annotation.Id; import org.elasticsearch.annotation.StringField; import org.elasticsearch.annotation.query.TermFilter; import org.elasticsearch.mapping.IndexType; @Getter @ESObject public class TopologyTemplate { @Id @Setter private String id; @StringField(includeInAll = true, indexType = IndexType.not_analyzed) @TermFilter @Setter private String name; @Setter private String description; private String topologyId; }
package alien4cloud.model.templates; import lombok.Getter; import lombok.Setter; import org.elasticsearch.annotation.ESObject; import org.elasticsearch.annotation.Id; import org.elasticsearch.annotation.StringField; import org.elasticsearch.annotation.query.TermFilter; import org.elasticsearch.mapping.IndexType; @Getter @ESObject public class TopologyTemplate { @Id @Setter private String id; @StringField(includeInAll = true, indexType = IndexType.not_analyzed) @TermFilter @Setter private String name; @Setter private String description; }
Remove useless topology id that was never set as they are configured in topology template versions.
Remove useless topology id that was never set as they are configured in topology template versions.
Java
apache-2.0
alien4cloud/alien4cloud,OresteVisari/alien4cloud,PierreLemordant/alien4cloud,broly-git/alien4cloud,OresteVisari/alien4cloud,alien4cloud/alien4cloud,san-tak/alien4cloud,broly-git/alien4cloud,OresteVisari/alien4cloud,alien4cloud/alien4cloud,OresteVisari/alien4cloud,broly-git/alien4cloud,san-tak/alien4cloud,san-tak/alien4cloud,PierreLemordant/alien4cloud,broly-git/alien4cloud,san-tak/alien4cloud,PierreLemordant/alien4cloud,PierreLemordant/alien4cloud,alien4cloud/alien4cloud
java
## Code Before: package alien4cloud.model.templates; import lombok.Getter; import lombok.Setter; import org.elasticsearch.annotation.ESObject; import org.elasticsearch.annotation.Id; import org.elasticsearch.annotation.StringField; import org.elasticsearch.annotation.query.TermFilter; import org.elasticsearch.mapping.IndexType; @Getter @ESObject public class TopologyTemplate { @Id @Setter private String id; @StringField(includeInAll = true, indexType = IndexType.not_analyzed) @TermFilter @Setter private String name; @Setter private String description; private String topologyId; } ## Instruction: Remove useless topology id that was never set as they are configured in topology template versions. ## Code After: package alien4cloud.model.templates; import lombok.Getter; import lombok.Setter; import org.elasticsearch.annotation.ESObject; import org.elasticsearch.annotation.Id; import org.elasticsearch.annotation.StringField; import org.elasticsearch.annotation.query.TermFilter; import org.elasticsearch.mapping.IndexType; @Getter @ESObject public class TopologyTemplate { @Id @Setter private String id; @StringField(includeInAll = true, indexType = IndexType.not_analyzed) @TermFilter @Setter private String name; @Setter private String description; }
// ... existing code ... private String name; @Setter private String description; } // ... rest of the code ...
ccaf9660077fe97888a27a4b954eba31046deccc
app/src/main/java/coatapp/coat/ForecastRequestTask.java
app/src/main/java/coatapp/coat/ForecastRequestTask.java
package coatapp.coat; import android.os.AsyncTask; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; public class ForecastRequestTask extends AsyncTask<String, Void, String> { public String result; public static String getForecastRequest(String urlToRead) throws Exception { StringBuilder result = new StringBuilder(); URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); String line; while ((line = rd.readLine()) != null) { result.append(line); } rd.close(); return result.toString(); } @Override protected String doInBackground(String... params) { String forecastRequestResponse = ""; try { forecastRequestResponse = getForecastRequest(params[0].toString()); result = forecastRequestResponse; } catch (Exception e) { e.printStackTrace(); } return forecastRequestResponse; } @Override protected void onPostExecute(String message) { //process message } }
package coatapp.coat; import android.os.AsyncTask; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; public class ForecastRequestTask extends AsyncTask<String, Void, String> { private static String getForecastRequest(String urlToRead) throws Exception { StringBuilder result = new StringBuilder(); URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); String line; while ((line = rd.readLine()) != null) { result.append(line); } rd.close(); return result.toString(); } @Override protected String doInBackground(String... params) { String forecastRequestResponse = ""; try { forecastRequestResponse = getForecastRequest(params[0]); } catch (Exception e) { e.printStackTrace(); } return forecastRequestResponse; } @Override protected void onPostExecute(String message) { //process message } }
Simplify class by removing unnecessary stuff
Simplify class by removing unnecessary stuff
Java
mit
xlpnic/coat
java
## Code Before: package coatapp.coat; import android.os.AsyncTask; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; public class ForecastRequestTask extends AsyncTask<String, Void, String> { public String result; public static String getForecastRequest(String urlToRead) throws Exception { StringBuilder result = new StringBuilder(); URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); String line; while ((line = rd.readLine()) != null) { result.append(line); } rd.close(); return result.toString(); } @Override protected String doInBackground(String... params) { String forecastRequestResponse = ""; try { forecastRequestResponse = getForecastRequest(params[0].toString()); result = forecastRequestResponse; } catch (Exception e) { e.printStackTrace(); } return forecastRequestResponse; } @Override protected void onPostExecute(String message) { //process message } } ## Instruction: Simplify class by removing unnecessary stuff ## Code After: package coatapp.coat; import android.os.AsyncTask; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; public class ForecastRequestTask extends AsyncTask<String, Void, String> { private static String getForecastRequest(String urlToRead) throws Exception { StringBuilder result = new StringBuilder(); URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); String line; while ((line = rd.readLine()) != null) { result.append(line); } rd.close(); return result.toString(); } @Override protected String doInBackground(String... params) { String forecastRequestResponse = ""; try { forecastRequestResponse = getForecastRequest(params[0]); } catch (Exception e) { e.printStackTrace(); } return forecastRequestResponse; } @Override protected void onPostExecute(String message) { //process message } }
// ... existing code ... public class ForecastRequestTask extends AsyncTask<String, Void, String> { private static String getForecastRequest(String urlToRead) throws Exception { StringBuilder result = new StringBuilder(); URL url = new URL(urlToRead); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); // ... modified code ... String forecastRequestResponse = ""; try { forecastRequestResponse = getForecastRequest(params[0]); } catch (Exception e) { e.printStackTrace(); } // ... rest of the code ...
550eaed89935300b8581ed77290925eda2fb8aeb
build-logic-commons/gradle-plugin/build.gradle.kts
build-logic-commons/gradle-plugin/build.gradle.kts
plugins { `kotlin-dsl` } group = "gradlebuild" description = "Provides plugins used to create a Gradle plugin with Groovy or Kotlin DSL within build-logic builds" java { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 } dependencies { compileOnly("com.gradle:gradle-enterprise-gradle-plugin:3.10.3") // TODO remove and keep 1.7 only once wrapper is updated if (KotlinVersion.CURRENT.isAtLeast(1, 7)) { implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.4.1") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.10") } else { implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.3.3") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.6.21") } implementation("org.gradle.kotlin:gradle-kotlin-dsl-conventions:0.8.0") }
plugins { `kotlin-dsl` } group = "gradlebuild" description = "Provides plugins used to create a Gradle plugin with Groovy or Kotlin DSL within build-logic builds" java { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 } dependencies { compileOnly("com.gradle:gradle-enterprise-gradle-plugin:3.10.3") implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.4.1") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.10") implementation("org.gradle.kotlin:gradle-kotlin-dsl-conventions:0.8.0") }
Remove Kotlin <> 1.7 branching in build logic
Remove Kotlin <> 1.7 branching in build logic Signed-off-by: Paul Merlin <[email protected]>
Kotlin
apache-2.0
gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle
kotlin
## Code Before: plugins { `kotlin-dsl` } group = "gradlebuild" description = "Provides plugins used to create a Gradle plugin with Groovy or Kotlin DSL within build-logic builds" java { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 } dependencies { compileOnly("com.gradle:gradle-enterprise-gradle-plugin:3.10.3") // TODO remove and keep 1.7 only once wrapper is updated if (KotlinVersion.CURRENT.isAtLeast(1, 7)) { implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.4.1") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.10") } else { implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.3.3") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.6.21") } implementation("org.gradle.kotlin:gradle-kotlin-dsl-conventions:0.8.0") } ## Instruction: Remove Kotlin <> 1.7 branching in build logic Signed-off-by: Paul Merlin <[email protected]> ## Code After: plugins { `kotlin-dsl` } group = "gradlebuild" description = "Provides plugins used to create a Gradle plugin with Groovy or Kotlin DSL within build-logic builds" java { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 } dependencies { compileOnly("com.gradle:gradle-enterprise-gradle-plugin:3.10.3") implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.4.1") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.10") implementation("org.gradle.kotlin:gradle-kotlin-dsl-conventions:0.8.0") }
# ... existing code ... dependencies { compileOnly("com.gradle:gradle-enterprise-gradle-plugin:3.10.3") implementation("org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:2.4.1") implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.10") implementation("org.gradle.kotlin:gradle-kotlin-dsl-conventions:0.8.0") } # ... rest of the code ...
211091ed92e8bafcac1e9b1c523d392b609fca73
saleor/core/tracing.py
saleor/core/tracing.py
from functools import partial from graphene.types.resolver import default_resolver from graphql import ResolveInfo def should_trace(info: ResolveInfo) -> bool: if info.field_name not in info.parent_type.fields: return False resolver = info.parent_type.fields[info.field_name].resolver return not ( resolver is None or is_default_resolver(resolver) or is_introspection_field(info) ) def is_introspection_field(info: ResolveInfo): for path in info.path: if isinstance(path, str) and path.startswith("__"): return True return False def is_default_resolver(resolver): while isinstance(resolver, partial): resolver = resolver.func if resolver is default_resolver: return True return resolver is default_resolver
from functools import partial from graphene.types.resolver import default_resolver from graphql import ResolveInfo def should_trace(info: ResolveInfo) -> bool: if info.field_name not in info.parent_type.fields: return False resolver = info.parent_type.fields[info.field_name].resolver return not ( resolver is None or is_default_resolver(resolver) or is_introspection_field(info) ) def is_introspection_field(info: ResolveInfo): if info.path is not None: for path in info.path: if isinstance(path, str) and path.startswith("__"): return True return False def is_default_resolver(resolver): while isinstance(resolver, partial): resolver = resolver.func if resolver is default_resolver: return True return resolver is default_resolver
Fix mypy error in introspection check
Fix mypy error in introspection check
Python
bsd-3-clause
mociepka/saleor,mociepka/saleor,mociepka/saleor
python
## Code Before: from functools import partial from graphene.types.resolver import default_resolver from graphql import ResolveInfo def should_trace(info: ResolveInfo) -> bool: if info.field_name not in info.parent_type.fields: return False resolver = info.parent_type.fields[info.field_name].resolver return not ( resolver is None or is_default_resolver(resolver) or is_introspection_field(info) ) def is_introspection_field(info: ResolveInfo): for path in info.path: if isinstance(path, str) and path.startswith("__"): return True return False def is_default_resolver(resolver): while isinstance(resolver, partial): resolver = resolver.func if resolver is default_resolver: return True return resolver is default_resolver ## Instruction: Fix mypy error in introspection check ## Code After: from functools import partial from graphene.types.resolver import default_resolver from graphql import ResolveInfo def should_trace(info: ResolveInfo) -> bool: if info.field_name not in info.parent_type.fields: return False resolver = info.parent_type.fields[info.field_name].resolver return not ( resolver is None or is_default_resolver(resolver) or is_introspection_field(info) ) def is_introspection_field(info: ResolveInfo): if info.path is not None: for path in info.path: if isinstance(path, str) and path.startswith("__"): return True return False def is_default_resolver(resolver): while isinstance(resolver, partial): resolver = resolver.func if resolver is default_resolver: return True return resolver is default_resolver
... def is_introspection_field(info: ResolveInfo): if info.path is not None: for path in info.path: if isinstance(path, str) and path.startswith("__"): return True return False ...
68e03d913fd6976981fd55f9da6df061319e849f
DND/src/edu/teco/dnd/module/messages/loadStartBlock/BlockMessageHandler.java
DND/src/edu/teco/dnd/module/messages/loadStartBlock/BlockMessageHandler.java
package edu.teco.dnd.module.messages.loadStartBlock; import java.util.UUID; import edu.teco.dnd.module.ModuleApplicationManager; import edu.teco.dnd.network.ConnectionManager; import edu.teco.dnd.network.MessageHandler; import edu.teco.dnd.network.messages.Response; public class BlockMessageHandler implements MessageHandler<BlockMessage> { final ModuleApplicationManager appManager; public BlockMessageHandler(ModuleApplicationManager appManager) { this.appManager = appManager; } @Override public Response handleMessage(ConnectionManager connMan, UUID remoteUUID, BlockMessage message) { if (appManager.scheduleBlock(message.getApplicationID(), message.block, message.scheduleToId)) { return new BlockAck(); } else { return new BlockNak(); } } }
package edu.teco.dnd.module.messages.loadStartBlock; import java.util.UUID; import edu.teco.dnd.module.ModuleApplicationManager; import edu.teco.dnd.network.ConnectionManager; import edu.teco.dnd.network.MessageHandler; import edu.teco.dnd.network.messages.Response; public class BlockMessageHandler implements MessageHandler<BlockMessage> { final ModuleApplicationManager appManager; public BlockMessageHandler(ModuleApplicationManager appManager) { this.appManager = appManager; } @Override public Response handleMessage(ConnectionManager connMan, UUID remoteUUID, BlockMessage message) { try { appManager.scheduleBlock(message.getApplicationID(), message.block, message.scheduleToId); } catch (Exception e) { return new BlockNak(); } return new BlockAck(); } }
Rewrite handler to work with exceptions instead of returned booleans.
Rewrite handler to work with exceptions instead of returned booleans.
Java
apache-2.0
DesignAndDeploy/dnd,DesignAndDeploy/dnd,DesignAndDeploy/dnd
java
## Code Before: package edu.teco.dnd.module.messages.loadStartBlock; import java.util.UUID; import edu.teco.dnd.module.ModuleApplicationManager; import edu.teco.dnd.network.ConnectionManager; import edu.teco.dnd.network.MessageHandler; import edu.teco.dnd.network.messages.Response; public class BlockMessageHandler implements MessageHandler<BlockMessage> { final ModuleApplicationManager appManager; public BlockMessageHandler(ModuleApplicationManager appManager) { this.appManager = appManager; } @Override public Response handleMessage(ConnectionManager connMan, UUID remoteUUID, BlockMessage message) { if (appManager.scheduleBlock(message.getApplicationID(), message.block, message.scheduleToId)) { return new BlockAck(); } else { return new BlockNak(); } } } ## Instruction: Rewrite handler to work with exceptions instead of returned booleans. ## Code After: package edu.teco.dnd.module.messages.loadStartBlock; import java.util.UUID; import edu.teco.dnd.module.ModuleApplicationManager; import edu.teco.dnd.network.ConnectionManager; import edu.teco.dnd.network.MessageHandler; import edu.teco.dnd.network.messages.Response; public class BlockMessageHandler implements MessageHandler<BlockMessage> { final ModuleApplicationManager appManager; public BlockMessageHandler(ModuleApplicationManager appManager) { this.appManager = appManager; } @Override public Response handleMessage(ConnectionManager connMan, UUID remoteUUID, BlockMessage message) { try { appManager.scheduleBlock(message.getApplicationID(), message.block, message.scheduleToId); } catch (Exception e) { return new BlockNak(); } return new BlockAck(); } }
// ... existing code ... @Override public Response handleMessage(ConnectionManager connMan, UUID remoteUUID, BlockMessage message) { try { appManager.scheduleBlock(message.getApplicationID(), message.block, message.scheduleToId); } catch (Exception e) { return new BlockNak(); } return new BlockAck(); } } // ... rest of the code ...
c4e71b56e74ab8b81a670c690fef6942d4a412b4
ocds/storage/backends/fs.py
ocds/storage/backends/fs.py
import os import os.path import logging import datetime from .base import Storage from ocds.storage.errors import InvalidPath logger = logging.getLogger(__name__) class FSStorage(Storage): def __init__(self, base_path): self.base_path = base_path if not os.path.exists(self.base_path): logger.warn('Initial path not exists. Creating') try: os.makedirs(self.base_path) except (IOError, OSError) as e: logger.error("Couldn't create destination dir." "Error {}".format(e)) raise InvalidPath('Not destination folder') def _from_string(self, string): return datetime.datetime.strptime('%Y-%m-%dT%H:%M:$S') def _path_from_date(self, date): if isinstance(date, str): path =
import os import os.path import logging import datetime import simplejson as json from .base import Storage from ocds.export.helpers import encoder from ocds.storage.errors import InvalidPath join = os.path.join logger = logging.getLogger(__name__) class FSStorage(Storage): def __init__(self, base_path): self.base_path = base_path self.path_fmt = '%Y-%m-%d/%H/%M%/%S' if not os.path.exists(self.base_path): logger.warn('Initial path not exists. Creating') try: os.makedirs(self.base_path) except (IOError, OSError) as e: logger.error("Couldn't create destination dir." "Error {}".format(e)) raise InvalidPath('Not destination folder') def _walk(self): for path, _, files in os.walk(self.base_path): for f in files: yield join(path, f) def _write(self, obj): path = join(self.base_path, self._path_from_date(obj['date'])) file_path = join(path, '{}.json'.format(obj['id'])) with open(file_path, 'w') as out: out.write(encoder(obj)) def _load(self, key): with open(join(self.base_path, key)) as out: result = json.load(out) return result def _from_string(self, string): return datetime.datetime.strptime('%Y-%m-%dT%H:%M:$S') def _path_from_date(self, date): if isinstance(date, str): path = self._from_string(date).srtftime(self.path_fmt) if isinstance(date, datetime.date): path = date.strftime(self.path_fmt) return path def __contains__(self, key): try: fs = open(join(self.base_path, key)) result = True except (IOError, OSError): result = False finally: fs.close() return result def __iter__(self): for f in self._walk(): yield f def save(self, obj): self._write(obj) def get(self, key): return self._load(key)
Add basic file system storage
Add basic file system storage
Python
apache-2.0
yshalenyk/openprocurement.ocds.export,yshalenyk/ocds.storage,yshalenyk/ocds.export,yshalenyk/openprocurement.ocds.export
python
## Code Before: import os import os.path import logging import datetime from .base import Storage from ocds.storage.errors import InvalidPath logger = logging.getLogger(__name__) class FSStorage(Storage): def __init__(self, base_path): self.base_path = base_path if not os.path.exists(self.base_path): logger.warn('Initial path not exists. Creating') try: os.makedirs(self.base_path) except (IOError, OSError) as e: logger.error("Couldn't create destination dir." "Error {}".format(e)) raise InvalidPath('Not destination folder') def _from_string(self, string): return datetime.datetime.strptime('%Y-%m-%dT%H:%M:$S') def _path_from_date(self, date): if isinstance(date, str): path = ## Instruction: Add basic file system storage ## Code After: import os import os.path import logging import datetime import simplejson as json from .base import Storage from ocds.export.helpers import encoder from ocds.storage.errors import InvalidPath join = os.path.join logger = logging.getLogger(__name__) class FSStorage(Storage): def __init__(self, base_path): self.base_path = base_path self.path_fmt = '%Y-%m-%d/%H/%M%/%S' if not os.path.exists(self.base_path): logger.warn('Initial path not exists. Creating') try: os.makedirs(self.base_path) except (IOError, OSError) as e: logger.error("Couldn't create destination dir." "Error {}".format(e)) raise InvalidPath('Not destination folder') def _walk(self): for path, _, files in os.walk(self.base_path): for f in files: yield join(path, f) def _write(self, obj): path = join(self.base_path, self._path_from_date(obj['date'])) file_path = join(path, '{}.json'.format(obj['id'])) with open(file_path, 'w') as out: out.write(encoder(obj)) def _load(self, key): with open(join(self.base_path, key)) as out: result = json.load(out) return result def _from_string(self, string): return datetime.datetime.strptime('%Y-%m-%dT%H:%M:$S') def _path_from_date(self, date): if isinstance(date, str): path = self._from_string(date).srtftime(self.path_fmt) if isinstance(date, datetime.date): path = date.strftime(self.path_fmt) return path def __contains__(self, key): try: fs = open(join(self.base_path, key)) result = True except (IOError, OSError): result = False finally: fs.close() return result def __iter__(self): for f in self._walk(): yield f def save(self, obj): self._write(obj) def get(self, key): return self._load(key)
// ... existing code ... import os.path import logging import datetime import simplejson as json from .base import Storage from ocds.export.helpers import encoder from ocds.storage.errors import InvalidPath join = os.path.join logger = logging.getLogger(__name__) // ... modified code ... def __init__(self, base_path): self.base_path = base_path self.path_fmt = '%Y-%m-%d/%H/%M%/%S' if not os.path.exists(self.base_path): logger.warn('Initial path not exists. Creating') try: ... "Error {}".format(e)) raise InvalidPath('Not destination folder') def _walk(self): for path, _, files in os.walk(self.base_path): for f in files: yield join(path, f) def _write(self, obj): path = join(self.base_path, self._path_from_date(obj['date'])) file_path = join(path, '{}.json'.format(obj['id'])) with open(file_path, 'w') as out: out.write(encoder(obj)) def _load(self, key): with open(join(self.base_path, key)) as out: result = json.load(out) return result def _from_string(self, string): return datetime.datetime.strptime('%Y-%m-%dT%H:%M:$S') def _path_from_date(self, date): if isinstance(date, str): path = self._from_string(date).srtftime(self.path_fmt) if isinstance(date, datetime.date): path = date.strftime(self.path_fmt) return path def __contains__(self, key): try: fs = open(join(self.base_path, key)) result = True except (IOError, OSError): result = False finally: fs.close() return result def __iter__(self): for f in self._walk(): yield f def save(self, obj): self._write(obj) def get(self, key): return self._load(key) // ... rest of the code ...
3243f199fb46d2d6f95ae9afd18b1570f9b5f529
astatsscraper/parsing.py
astatsscraper/parsing.py
def parse_app_page(response): # Should always be able to grab a title title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip() # Parse times into floats time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip() time_to_hundo = time_to_hundo.replace(',', '.') time_to_hundo = float(time_to_hundo) # Points may or may not be present, default to 0 if absent points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract() if not points: points = 0 else: points = int(points[0].strip()) yield { 'title': title, 'time to 100%': time_to_hundo, 'points': points, } def parse_search_result_for_apps(response): for href in response.xpath('//table//table//a/@href'): relative_url = href.extract() if relative_url.startswith('Steam_Game_Info.php?AppID='): yield relative_url[:len('Steam_Game_Info.php?AppID=')]
def parse_app_page(response): # Should always be able to grab a title title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip() # Parse times into floats time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip() time_to_hundo = time_to_hundo.replace(',', '.') time_to_hundo = float(time_to_hundo) # Points may or may not be present, default to 0 if absent points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract() if not points: points = 0 else: points = int(points[0].strip()) yield { 'title': title, 'time to 100%': time_to_hundo, 'points': points, } def parse_search_result_for_apps(response): for href in response.xpath('//table//table//a/@href'): relative_url = href.extract() if relative_url.startswith('Steam_Game_Info.php?AppID='): yield { 'app_id' : relative_url[len('Steam_Game_Info.php?AppID='):] }
Fix up bad last commit
Fix up bad last commit
Python
mit
SingingTree/AStatsScraper,SingingTree/AStatsScraper
python
## Code Before: def parse_app_page(response): # Should always be able to grab a title title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip() # Parse times into floats time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip() time_to_hundo = time_to_hundo.replace(',', '.') time_to_hundo = float(time_to_hundo) # Points may or may not be present, default to 0 if absent points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract() if not points: points = 0 else: points = int(points[0].strip()) yield { 'title': title, 'time to 100%': time_to_hundo, 'points': points, } def parse_search_result_for_apps(response): for href in response.xpath('//table//table//a/@href'): relative_url = href.extract() if relative_url.startswith('Steam_Game_Info.php?AppID='): yield relative_url[:len('Steam_Game_Info.php?AppID=')] ## Instruction: Fix up bad last commit ## Code After: def parse_app_page(response): # Should always be able to grab a title title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip() # Parse times into floats time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip() time_to_hundo = time_to_hundo.replace(',', '.') time_to_hundo = float(time_to_hundo) # Points may or may not be present, default to 0 if absent points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract() if not points: points = 0 else: points = int(points[0].strip()) yield { 'title': title, 'time to 100%': time_to_hundo, 'points': points, } def parse_search_result_for_apps(response): for href in response.xpath('//table//table//a/@href'): relative_url = href.extract() if relative_url.startswith('Steam_Game_Info.php?AppID='): yield { 'app_id' : relative_url[len('Steam_Game_Info.php?AppID='):] }
... 'points': points, } def parse_search_result_for_apps(response): for href in response.xpath('//table//table//a/@href'): relative_url = href.extract() if relative_url.startswith('Steam_Game_Info.php?AppID='): yield { 'app_id' : relative_url[len('Steam_Game_Info.php?AppID='):] } ...
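Editor's aside, not part of the record above: the AStatsScraper fix swaps a prefix slice for a suffix slice. A minimal illustration, with an invented example URL, of why relative_url[:len(prefix)] could only ever return the query prefix itself while relative_url[len(prefix):] yields the app id:

# Hypothetical value for illustration only; not taken from real astats output.
prefix = 'Steam_Game_Info.php?AppID='
relative_url = 'Steam_Game_Info.php?AppID=440'

assert relative_url[:len(prefix)] == 'Steam_Game_Info.php?AppID='  # old slice: the prefix itself
assert relative_url[len(prefix):] == '440'                         # fixed slice: the application id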
5b63b724a89c11aa06a8281cf778063c4a8a7096
Artifactor/views.py
Artifactor/views.py
from django.forms import ModelForm from django.http import Http404, HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt from Artifactor.models import Artifact def index(request): artifacts = Artifact.objects.all() return render_to_response('Artifactor/index.html', {'artifacts': artifacts}, context_instance=RequestContext(request)) class ArtifactForm(ModelForm): class Meta: model = Artifact fields = ('path',) @csrf_exempt def post(request): if request.method == 'POST': form = ArtifactForm(request.POST, request.FILES) if form.is_valid(): form.save() return HttpResponseRedirect('/') else: raise Http404
from django.forms import ModelForm from django.http import Http404, HttpResponse from django.shortcuts import render_to_response from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt from Artifactor.models import Artifact def index(request): artifacts = Artifact.objects.all() return render_to_response('Artifactor/index.html', {'artifacts': artifacts}, context_instance=RequestContext(request)) class ArtifactForm(ModelForm): class Meta: model = Artifact fields = ('path',) @csrf_exempt def post(request): if request.method == 'POST': form = ArtifactForm(request.POST, request.FILES) if form.is_valid(): artifact = form.save() return HttpResponse(artifact.path.url, content_type='text/plain') else: raise Http404
Return the right URL when a file is posted
Return the right URL when a file is posted
Python
mit
ivoire/Artifactorial,ivoire/Artifactorial,ivoire/Artifactorial
python
## Code Before: from django.forms import ModelForm from django.http import Http404, HttpResponseRedirect from django.shortcuts import render_to_response from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt from Artifactor.models import Artifact def index(request): artifacts = Artifact.objects.all() return render_to_response('Artifactor/index.html', {'artifacts': artifacts}, context_instance=RequestContext(request)) class ArtifactForm(ModelForm): class Meta: model = Artifact fields = ('path',) @csrf_exempt def post(request): if request.method == 'POST': form = ArtifactForm(request.POST, request.FILES) if form.is_valid(): form.save() return HttpResponseRedirect('/') else: raise Http404 ## Instruction: Return the right URL when a file is posted ## Code After: from django.forms import ModelForm from django.http import Http404, HttpResponse from django.shortcuts import render_to_response from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt from Artifactor.models import Artifact def index(request): artifacts = Artifact.objects.all() return render_to_response('Artifactor/index.html', {'artifacts': artifacts}, context_instance=RequestContext(request)) class ArtifactForm(ModelForm): class Meta: model = Artifact fields = ('path',) @csrf_exempt def post(request): if request.method == 'POST': form = ArtifactForm(request.POST, request.FILES) if form.is_valid(): artifact = form.save() return HttpResponse(artifact.path.url, content_type='text/plain') else: raise Http404
# ... existing code ... from django.forms import ModelForm from django.http import Http404, HttpResponse from django.shortcuts import render_to_response from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt # ... modified code ... if request.method == 'POST': form = ArtifactForm(request.POST, request.FILES) if form.is_valid(): artifact = form.save() return HttpResponse(artifact.path.url, content_type='text/plain') else: raise Http404 # ... rest of the code ...
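Editor's aside, not part of the record above: with this change a scripted upload can read the stored file's URL straight from the response body instead of following a redirect. A hypothetical client-side sketch -- the endpoint URL is assumed, and only the form field name 'path' comes from the form in the record:

import requests

# Assumed route for the post() view; adjust to the project's actual urlconf.
response = requests.post('http://localhost:8000/post',
                         files={'path': open('build.log', 'rb')})
print(response.text)  # the newly stored artifact's URL, served as text/plain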
f225ffecf061470b877388d26c1605248b9611da
ygorcam.py
ygorcam.py
import tempfile import subprocess import web urls = ("/camera", "Camera") app = web.application(urls, globals()) class Camera(object): def GET(self): with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp: process = subprocess.Popen(["picamera", "-o", tfp.name]) stdout, stderr = process.communicate() if process.returncode: raise Exception((stdout, stderr)) web.header("Content-Type", "image/jpeg") return tfp.read() if __name__ == "__main__": app.run()
import tempfile import subprocess import web urls = ("/camera", "Camera") app = web.application(urls, globals()) class Camera(object): def GET(self): with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp: process = subprocess.Popen(["raspistill", "-o", tfp.name]) stdout, stderr = process.communicate() if process.returncode: raise Exception((process.returncode, stdout, stderr)) web.header("Content-Type", "image/jpeg") return tfp.read() if __name__ == "__main__": app.run()
Use raspistill and provide some additional error info
Use raspistill and provide some additional error info
Python
mit
f0rk/ygorcam
python
## Code Before: import tempfile import subprocess import web urls = ("/camera", "Camera") app = web.application(urls, globals()) class Camera(object): def GET(self): with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp: process = subprocess.Popen(["picamera", "-o", tfp.name]) stdout, stderr = process.communicate() if process.returncode: raise Exception((stdout, stderr)) web.header("Content-Type", "image/jpeg") return tfp.read() if __name__ == "__main__": app.run() ## Instruction: Use raspistill and provide some additional error info ## Code After: import tempfile import subprocess import web urls = ("/camera", "Camera") app = web.application(urls, globals()) class Camera(object): def GET(self): with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp: process = subprocess.Popen(["raspistill", "-o", tfp.name]) stdout, stderr = process.communicate() if process.returncode: raise Exception((process.returncode, stdout, stderr)) web.header("Content-Type", "image/jpeg") return tfp.read() if __name__ == "__main__": app.run()
... class Camera(object): def GET(self): with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp: process = subprocess.Popen(["raspistill", "-o", tfp.name]) stdout, stderr = process.communicate() if process.returncode: raise Exception((process.returncode, stdout, stderr)) web.header("Content-Type", "image/jpeg") return tfp.read() ...
574b0a30102889ca6e9854bced61999dcfe941b7
src/main/java/com/skelril/skree/content/registry/item/currency/CondensedCofferItem.java
src/main/java/com/skelril/skree/content/registry/item/currency/CondensedCofferItem.java
/* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.registry.item.currency; import com.skelril.nitro.registry.item.CraftableItem; import net.minecraft.item.ItemStack; import net.minecraftforge.fml.common.registry.GameRegistry; public class CondensedCofferItem extends CofferItem implements CraftableItem { private CofferItem parent; public CondensedCofferItem(String ID, CofferItem parent) { super(ID, parent.getCofferValue() * 9); this.parent = parent; } public CofferItem getParent() { return parent; } @Override public void registerRecipes() { GameRegistry.addRecipe( new ItemStack(this), "AAA", "AAA", "AAA", 'A', new ItemStack(parent) ); } }
/* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.registry.item.currency; import com.skelril.nitro.registry.item.CraftableItem; import net.minecraft.item.ItemStack; import net.minecraftforge.fml.common.registry.GameRegistry; public class CondensedCofferItem extends CofferItem implements CraftableItem { private CofferItem parent; public CondensedCofferItem(String ID, CofferItem parent) { super(ID, parent.getCofferValue() * 9); this.parent = parent; } public CofferItem getParent() { return parent; } @Override public void registerRecipes() { GameRegistry.addRecipe( new ItemStack(this), "AAA", "AAA", "AAA", 'A', new ItemStack(parent) ); GameRegistry.addShapelessRecipe( new ItemStack(parent, 9), new ItemStack(this) ); } }
Add a recipe for making the parent item from the created currency item
Add a recipe for making the parent item from the created currency item
Java
mpl-2.0
Skelril/Skree
java
## Code Before: /* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.registry.item.currency; import com.skelril.nitro.registry.item.CraftableItem; import net.minecraft.item.ItemStack; import net.minecraftforge.fml.common.registry.GameRegistry; public class CondensedCofferItem extends CofferItem implements CraftableItem { private CofferItem parent; public CondensedCofferItem(String ID, CofferItem parent) { super(ID, parent.getCofferValue() * 9); this.parent = parent; } public CofferItem getParent() { return parent; } @Override public void registerRecipes() { GameRegistry.addRecipe( new ItemStack(this), "AAA", "AAA", "AAA", 'A', new ItemStack(parent) ); } } ## Instruction: Add a recipe for making the parent item from the created currency item ## Code After: /* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.registry.item.currency; import com.skelril.nitro.registry.item.CraftableItem; import net.minecraft.item.ItemStack; import net.minecraftforge.fml.common.registry.GameRegistry; public class CondensedCofferItem extends CofferItem implements CraftableItem { private CofferItem parent; public CondensedCofferItem(String ID, CofferItem parent) { super(ID, parent.getCofferValue() * 9); this.parent = parent; } public CofferItem getParent() { return parent; } @Override public void registerRecipes() { GameRegistry.addRecipe( new ItemStack(this), "AAA", "AAA", "AAA", 'A', new ItemStack(parent) ); GameRegistry.addShapelessRecipe( new ItemStack(parent, 9), new ItemStack(this) ); } }
// ... existing code ... "AAA", 'A', new ItemStack(parent) ); GameRegistry.addShapelessRecipe( new ItemStack(parent, 9), new ItemStack(this) ); } } // ... rest of the code ...
9ce90b52bff35d5d0ad87d2402a5e8a946938cf7
sideloader/forms.py
sideloader/forms.py
from django.contrib.auth.models import User from django import forms from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit import models class BaseModelForm(forms.ModelForm): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class BaseForm(forms.Form): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class ReleaseForm(BaseModelForm): class Meta: model = models.ReleaseStream class ProjectForm(BaseModelForm): github_url = forms.CharField(label="Git checkout URL") class Meta: model = models.Project exclude = ('idhash', 'created_by_user',) def clean(self): cleaned_data = super(ProjectForm, self).clean() uri = cleaned_data['github_url'].strip() if not (uri[-4:] == '.git'): raise forms.ValidationError("Not a valid Git URI") cleaned_data['github_url'] = uri return cleaned_data class UserForm(BaseModelForm): password = forms.CharField(widget=forms.PasswordInput(), initial='') class Meta: model = User exclude = ( 'email', 'username', 'is_staff', 'is_active', 'is_superuser', 'last_login', 'date_joined', 'groups', 'user_permissions' )
from django.contrib.auth.models import User from django import forms from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit import models class BaseModelForm(forms.ModelForm): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class BaseForm(forms.Form): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class ReleaseForm(BaseModelForm): class Meta: model = models.ReleaseStream class ProjectForm(BaseModelForm): github_url = forms.CharField(label="Git checkout URL") allowed_users = forms.ModelMultipleChoiceField( queryset=User.objects.all().order_by('username'), required=False, widget=forms.widgets.CheckboxSelectMultiple ) class Meta: model = models.Project exclude = ('idhash', 'created_by_user',) def clean(self): cleaned_data = super(ProjectForm, self).clean() uri = cleaned_data['github_url'].strip() if not (uri[-4:] == '.git'): raise forms.ValidationError("Not a valid Git URI") cleaned_data['github_url'] = uri return cleaned_data class UserForm(BaseModelForm): password = forms.CharField(widget=forms.PasswordInput(), initial='') class Meta: model = User exclude = ( 'email', 'username', 'is_staff', 'is_active', 'is_superuser', 'last_login', 'date_joined', 'groups', 'user_permissions' )
Improve the project form a bit
Improve the project form a bit
Python
mit
praekelt/sideloader,praekelt/sideloader,praekelt/sideloader,praekelt/sideloader
python
## Code Before: from django.contrib.auth.models import User from django import forms from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit import models class BaseModelForm(forms.ModelForm): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class BaseForm(forms.Form): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class ReleaseForm(BaseModelForm): class Meta: model = models.ReleaseStream class ProjectForm(BaseModelForm): github_url = forms.CharField(label="Git checkout URL") class Meta: model = models.Project exclude = ('idhash', 'created_by_user',) def clean(self): cleaned_data = super(ProjectForm, self).clean() uri = cleaned_data['github_url'].strip() if not (uri[-4:] == '.git'): raise forms.ValidationError("Not a valid Git URI") cleaned_data['github_url'] = uri return cleaned_data class UserForm(BaseModelForm): password = forms.CharField(widget=forms.PasswordInput(), initial='') class Meta: model = User exclude = ( 'email', 'username', 'is_staff', 'is_active', 'is_superuser', 'last_login', 'date_joined', 'groups', 'user_permissions' ) ## Instruction: Improve the project form a bit ## Code After: from django.contrib.auth.models import User from django import forms from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit import models class BaseModelForm(forms.ModelForm): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class BaseForm(forms.Form): helper = FormHelper() helper.form_class = 'form-horizontal' helper.add_input(Submit('submit', 'Submit')) class ReleaseForm(BaseModelForm): class Meta: model = models.ReleaseStream class ProjectForm(BaseModelForm): github_url = forms.CharField(label="Git checkout URL") allowed_users = forms.ModelMultipleChoiceField( queryset=User.objects.all().order_by('username'), required=False, widget=forms.widgets.CheckboxSelectMultiple ) class Meta: model = models.Project exclude = ('idhash', 'created_by_user',) def clean(self): cleaned_data = super(ProjectForm, self).clean() uri = cleaned_data['github_url'].strip() if not (uri[-4:] == '.git'): raise forms.ValidationError("Not a valid Git URI") cleaned_data['github_url'] = uri return cleaned_data class UserForm(BaseModelForm): password = forms.CharField(widget=forms.PasswordInput(), initial='') class Meta: model = User exclude = ( 'email', 'username', 'is_staff', 'is_active', 'is_superuser', 'last_login', 'date_joined', 'groups', 'user_permissions' )
# ... existing code ... class ProjectForm(BaseModelForm): github_url = forms.CharField(label="Git checkout URL") allowed_users = forms.ModelMultipleChoiceField( queryset=User.objects.all().order_by('username'), required=False, widget=forms.widgets.CheckboxSelectMultiple ) class Meta: model = models.Project exclude = ('idhash', 'created_by_user',) # ... rest of the code ...
8dbd89ba8335b7c8e139de177cd1d86fa38e60a9
src/main/java/org/springframework/indexer/WebDocumentSearchEntryMapper.java
src/main/java/org/springframework/indexer/WebDocumentSearchEntryMapper.java
package org.springframework.indexer; import org.jsoup.nodes.Document; import org.springframework.search.SearchEntry; import org.springframework.search.SearchEntryMapper; import java.util.Date; public class WebDocumentSearchEntryMapper implements SearchEntryMapper<WebDocument> { @Override public SearchEntry map(WebDocument document) { Document input = document.getDocument(); SearchEntry entry = new SearchEntry(); entry.setId(document.getPath()); entry.setPublishAt(new Date(0L)); String text = input.text(); entry.setRawContent(text); entry.setSummary(text.substring(0, Math.min(500, text.length()))); entry.setTitle(input.title()); entry.setPath(document.getPath()); return entry; } }
package org.springframework.indexer; import org.jsoup.nodes.Document; import org.springframework.search.SearchEntry; import org.springframework.search.SearchEntryMapper; import java.util.Date; public class WebDocumentSearchEntryMapper implements SearchEntryMapper<WebDocument> { @Override public SearchEntry map(WebDocument document) { Document input = document.getDocument(); SearchEntry entry = new SearchEntry(); entry.setId(document.getPath().replaceAll("/", "_")); entry.setPublishAt(new Date(0L)); String text = input.text(); entry.setRawContent(text); entry.setSummary(text.substring(0, Math.min(500, text.length()))); entry.setTitle(input.title()); entry.setPath(document.getPath()); return entry; } }
Fix broken api docs index
Fix broken api docs index - api docs entries' IDs were full URL paths, which broke Elasticsearch indexing. Replaced '/' with '_' in these IDs.
Java
bsd-3-clause
amit-siddhu/sagan,vizewang/sagan,dvaughn555/sagan,dlizarra/sagan,eonezhang/sagan,andriykonoz/sagan,TabberGit/sagan,spring-io/sagan,seoilhyun/sagan,rwinch/sagan,seoilhyun/sagan,dsyer/sagan,dlizarra/sagan,rwinch/sagan,garylgh/sagan,dkushner/sagan,vizewang/sagan,eonezhang/sagan,yhj630520/sagan,garylgh/sagan,lowtalker/sagan,spring-io/sagan,BHASKARSDEN277GITHUB/sagan,lowtalker/sagan,lowtalker/sagan,dkushner/sagan,dharmaraju/sagan,mbateman/sagan,TabberGit/sagan,yhj630520/sagan,andriykonoz/sagan,seoilhyun/sagan,amit-siddhu/sagan,dvaughn555/sagan,BHASKARSDEN277GITHUB/sagan,vizewang/sagan,spring-io/sagan,dkushner/sagan,garylgh/sagan,vizewang/sagan,dharmaraju/sagan,spring-io/sagan,andriykonoz/sagan,dvaughn555/sagan,mbateman/sagan,eonezhang/sagan,yhj630520/sagan,martinlippert/sagan,mbateman/sagan,andriykonoz/sagan,dharmaraju/sagan,dsyer/sagan,dkushner/sagan,dsyer/sagan,martinlippert/sagan,dlizarra/sagan,TabberGit/sagan,mbateman/sagan,BHASKARSDEN277GITHUB/sagan,BHASKARSDEN277GITHUB/sagan,dharmaraju/sagan,martinlippert/sagan,TabberGit/sagan,rwinch/sagan,rwinch/sagan,garylgh/sagan,seoilhyun/sagan,dsyer/sagan,yhj630520/sagan,martinlippert/sagan,gregturn/sagan,lowtalker/sagan,dlizarra/sagan,amit-siddhu/sagan,gregturn/sagan,dvaughn555/sagan,amit-siddhu/sagan,gregturn/sagan,eonezhang/sagan
java
## Code Before: package org.springframework.indexer; import org.jsoup.nodes.Document; import org.springframework.search.SearchEntry; import org.springframework.search.SearchEntryMapper; import java.util.Date; public class WebDocumentSearchEntryMapper implements SearchEntryMapper<WebDocument> { @Override public SearchEntry map(WebDocument document) { Document input = document.getDocument(); SearchEntry entry = new SearchEntry(); entry.setId(document.getPath()); entry.setPublishAt(new Date(0L)); String text = input.text(); entry.setRawContent(text); entry.setSummary(text.substring(0, Math.min(500, text.length()))); entry.setTitle(input.title()); entry.setPath(document.getPath()); return entry; } } ## Instruction: Fix broken api docs index - api docs entries Id's were full url paths which broke elasticsearch indexing. Replaced '/' with _ in these ids ## Code After: package org.springframework.indexer; import org.jsoup.nodes.Document; import org.springframework.search.SearchEntry; import org.springframework.search.SearchEntryMapper; import java.util.Date; public class WebDocumentSearchEntryMapper implements SearchEntryMapper<WebDocument> { @Override public SearchEntry map(WebDocument document) { Document input = document.getDocument(); SearchEntry entry = new SearchEntry(); entry.setId(document.getPath().replaceAll("/", "_")); entry.setPublishAt(new Date(0L)); String text = input.text(); entry.setRawContent(text); entry.setSummary(text.substring(0, Math.min(500, text.length()))); entry.setTitle(input.title()); entry.setPath(document.getPath()); return entry; } }
// ... existing code ... public SearchEntry map(WebDocument document) { Document input = document.getDocument(); SearchEntry entry = new SearchEntry(); entry.setId(document.getPath().replaceAll("/", "_")); entry.setPublishAt(new Date(0L)); String text = input.text(); entry.setRawContent(text); // ... rest of the code ...
e2f2fbc0df695102c4d51bdf0e633798c3ae8417
yawf/messages/submessage.py
yawf/messages/submessage.py
from . import Message class Submessage(object): need_lock_object = True def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True): self.obj = obj self.sender = sender self.message_id = message_id self.raw_params = raw_params self.need_lock_object = need_lock_object super(Submessage, self).__init__() def as_message(self, parent): return Message(self.sender, self.message_id, self.raw_params, parent_message_id=parent.unique_id, message_group=parent.message_group, ) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( self.obj, message=message, defer_side_effect=True, need_lock_object=self.need_lock_object) class RecursiveSubmessage(Submessage): def __init__(self, message_id, sender, raw_params): super(RecursiveSubmessage, self).__init__( obj=None, sender=sender, message_id=message_id, raw_params=raw_params) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( parent_obj, message=message, defer_side_effect=True, need_lock_object=False)
from . import Message class Submessage(object): need_lock_object = True def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True): self.obj = obj self.sender = sender self.message_id = message_id self.raw_params = raw_params self.need_lock_object = need_lock_object super(Submessage, self).__init__() def as_message(self, parent): return Message(self.sender, self.message_id, self.raw_params, parent_message_id=parent.unique_id, message_group=parent.message_group, ) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( self.obj, message=message, defer_side_effect=True, need_lock_object=self.need_lock_object) class RecursiveSubmessage(Submessage): def __init__(self, message_id, sender, raw_params=None): super(RecursiveSubmessage, self).__init__( obj=None, sender=sender, message_id=message_id, raw_params=raw_params) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( parent_obj, message=message, defer_side_effect=True, need_lock_object=False)
Make raw_params an optional argument in Submessage
Make raw_params an optional argument in Submessage
Python
mit
freevoid/yawf
python
## Code Before: from . import Message class Submessage(object): need_lock_object = True def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True): self.obj = obj self.sender = sender self.message_id = message_id self.raw_params = raw_params self.need_lock_object = need_lock_object super(Submessage, self).__init__() def as_message(self, parent): return Message(self.sender, self.message_id, self.raw_params, parent_message_id=parent.unique_id, message_group=parent.message_group, ) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( self.obj, message=message, defer_side_effect=True, need_lock_object=self.need_lock_object) class RecursiveSubmessage(Submessage): def __init__(self, message_id, sender, raw_params): super(RecursiveSubmessage, self).__init__( obj=None, sender=sender, message_id=message_id, raw_params=raw_params) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( parent_obj, message=message, defer_side_effect=True, need_lock_object=False) ## Instruction: Make raw_params an optional argument in Submessage ## Code After: from . import Message class Submessage(object): need_lock_object = True def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True): self.obj = obj self.sender = sender self.message_id = message_id self.raw_params = raw_params self.need_lock_object = need_lock_object super(Submessage, self).__init__() def as_message(self, parent): return Message(self.sender, self.message_id, self.raw_params, parent_message_id=parent.unique_id, message_group=parent.message_group, ) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( self.obj, message=message, defer_side_effect=True, need_lock_object=self.need_lock_object) class RecursiveSubmessage(Submessage): def __init__(self, message_id, sender, raw_params=None): super(RecursiveSubmessage, self).__init__( obj=None, sender=sender, message_id=message_id, raw_params=raw_params) def dispatch(self, parent_obj, parent_message): from yawf.dispatch import dispatch_message message = self.as_message(parent_message) return dispatch_message( parent_obj, message=message, defer_side_effect=True, need_lock_object=False)
# ... existing code ... need_lock_object = True def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True): self.obj = obj self.sender = sender self.message_id = message_id # ... modified code ... class RecursiveSubmessage(Submessage): def __init__(self, message_id, sender, raw_params=None): super(RecursiveSubmessage, self).__init__( obj=None, sender=sender, message_id=message_id, raw_params=raw_params) # ... rest of the code ...
36bde060bbdb4cf9d0396719b8b82952a73bf2b5
bucky/collector.py
bucky/collector.py
import time import multiprocessing try: from setproctitle import setproctitle except ImportError: def setproctitle(title): pass class StatsCollector(multiprocessing.Process): def __init__(self, queue): super(StatsCollector, self).__init__() self.queue = queue def close(self): pass def run(self): setproctitle("bucky: %s" % self.__class__.__name__) err = 0 while True: start_timestamp = time.time() if not self.collect(): err = min(err + 1, 2) else: err = 0 stop_timestamp = time.time() sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp) if sleep_time > 0.1: time.sleep(sleep_time) def collect(self): raise NotImplementedError() def add_stat(self, name, value, timestamp, **metadata): if metadata: if self.metadata: metadata.update(self.metadata) else: metadata = self.metadata if metadata: self.queue.put((None, name, value, timestamp, metadata)) else: self.queue.put((None, name, value, timestamp)) def merge_dicts(self, *dicts): ret = {} for d in dicts: if d: ret.update(d) return ret
import time import multiprocessing try: from setproctitle import setproctitle except ImportError: def setproctitle(title): pass class StatsCollector(multiprocessing.Process): def __init__(self, queue): super(StatsCollector, self).__init__() self.queue = queue def close(self): pass def run(self): setproctitle("bucky: %s" % self.__class__.__name__) interval = self.interval while True: start_timestamp = time.time() interval = self.interval if self.collect() else interval+interval stop_timestamp = time.time() interval = min(interval, 300) interval = interval - (stop_timestamp - start_timestamp) if interval > 0.1: time.sleep(interval) def collect(self): raise NotImplementedError() def add_stat(self, name, value, timestamp, **metadata): if metadata: if self.metadata: metadata.update(self.metadata) else: metadata = self.metadata if metadata: self.queue.put((None, name, value, timestamp, metadata)) else: self.queue.put((None, name, value, timestamp)) def merge_dicts(self, *dicts): ret = {} for d in dicts: if d: ret.update(d) return ret
Change the back-off algo for failures
Change the back-off algo for failures
Python
apache-2.0
jsiembida/bucky3
python
## Code Before: import time import multiprocessing try: from setproctitle import setproctitle except ImportError: def setproctitle(title): pass class StatsCollector(multiprocessing.Process): def __init__(self, queue): super(StatsCollector, self).__init__() self.queue = queue def close(self): pass def run(self): setproctitle("bucky: %s" % self.__class__.__name__) err = 0 while True: start_timestamp = time.time() if not self.collect(): err = min(err + 1, 2) else: err = 0 stop_timestamp = time.time() sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp) if sleep_time > 0.1: time.sleep(sleep_time) def collect(self): raise NotImplementedError() def add_stat(self, name, value, timestamp, **metadata): if metadata: if self.metadata: metadata.update(self.metadata) else: metadata = self.metadata if metadata: self.queue.put((None, name, value, timestamp, metadata)) else: self.queue.put((None, name, value, timestamp)) def merge_dicts(self, *dicts): ret = {} for d in dicts: if d: ret.update(d) return ret ## Instruction: Change the back-off algo for failures ## Code After: import time import multiprocessing try: from setproctitle import setproctitle except ImportError: def setproctitle(title): pass class StatsCollector(multiprocessing.Process): def __init__(self, queue): super(StatsCollector, self).__init__() self.queue = queue def close(self): pass def run(self): setproctitle("bucky: %s" % self.__class__.__name__) interval = self.interval while True: start_timestamp = time.time() interval = self.interval if self.collect() else interval+interval stop_timestamp = time.time() interval = min(interval, 300) interval = interval - (stop_timestamp - start_timestamp) if interval > 0.1: time.sleep(interval) def collect(self): raise NotImplementedError() def add_stat(self, name, value, timestamp, **metadata): if metadata: if self.metadata: metadata.update(self.metadata) else: metadata = self.metadata if metadata: self.queue.put((None, name, value, timestamp, metadata)) else: self.queue.put((None, name, value, timestamp)) def merge_dicts(self, *dicts): ret = {} for d in dicts: if d: ret.update(d) return ret
# ... existing code ... def run(self): setproctitle("bucky: %s" % self.__class__.__name__) interval = self.interval while True: start_timestamp = time.time() interval = self.interval if self.collect() else interval+interval stop_timestamp = time.time() interval = min(interval, 300) interval = interval - (stop_timestamp - start_timestamp) if interval > 0.1: time.sleep(interval) def collect(self): raise NotImplementedError() # ... rest of the code ...
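Editor's aside, not part of the record above: the rewritten loop replaces the old error counter with a doubling back-off. A simplified sketch of that behaviour under the same assumptions -- reset the interval on a successful collect, double it after a failure, and never wait longer than five minutes; collect and base_interval stand in for the collector's own method and attribute:

import time

def run_with_backoff(collect, base_interval, max_interval=300):
    interval = base_interval
    while True:
        started = time.time()
        # Success resets the wait; failure doubles it, bounded by max_interval.
        interval = base_interval if collect() else min(interval * 2, max_interval)
        remaining = interval - (time.time() - started)
        if remaining > 0.1:
            time.sleep(remaining)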
326bebb58242981ec66f257525e5c5f58fae9196
example/article/admin.py
example/article/admin.py
from django.contrib import admin from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget from parler.admin import TranslatableAdmin from .models import Article from parler.forms import TranslatableModelForm, TranslatedField class ArticleAdminForm(TranslatableModelForm): """ Example form Translated fields can be enhanced by manually declaring them: """ title = TranslatedField(widget=AdminTextInputWidget) content = TranslatedField(widget=AdminTextareaWidget) class ArticleAdmin(TranslatableAdmin): """ Example admin. Using an empty class would already work, but this example shows some additional options. """ # The 'language_column' is provided by the base class: list_display = ('title', 'language_column') # Example custom form usage. form = ArticleAdminForm # NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets= fieldsets = ( (None, { 'fields': ('title', 'slug', 'published'), }), ("Contents", { 'fields': ('content',), }) ) def get_prepopulated_fields(self, request, obj=None): # Can't use prepopulated_fields= yet, but this is a workaround. return {'slug': ('title',)} admin.site.register(Article, ArticleAdmin)
from django.contrib import admin from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget from parler.admin import TranslatableAdmin from .models import Article from parler.forms import TranslatableModelForm, TranslatedField class ArticleAdminForm(TranslatableModelForm): """ Example form Translated fields can be enhanced by manually declaring them: """ title = TranslatedField(widget=AdminTextInputWidget) content = TranslatedField(widget=AdminTextareaWidget) class ArticleAdmin(TranslatableAdmin): """ Example admin. Using an empty class would already work, but this example shows some additional options. """ # The 'language_column' is provided by the base class: list_display = ('title', 'language_column') list_filter = ('published',) # Example custom form usage. form = ArticleAdminForm # NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets= fieldsets = ( (None, { 'fields': ('title', 'slug', 'published'), }), ("Contents", { 'fields': ('content',), }) ) def get_prepopulated_fields(self, request, obj=None): # Can't use prepopulated_fields= yet, but this is a workaround. return {'slug': ('title',)} admin.site.register(Article, ArticleAdmin)
Add list_filter to example ArticleAdmin
Add list_filter to example ArticleAdmin. When using a list filter and then adding or editing an object, the language GET parameter goes missing, causing the wrong translation to be edited.
Python
apache-2.0
django-parler/django-parler,jrief/django-parler,edoburu/django-parler,HiddenData/django-parler,skirsdeda/django-parler,edoburu/django-parler,zhangguiyu/django-parler,jrief/django-parler,django-parler/django-parler,imposeren/django-parler,defivelo/django-parler,imposeren/django-parler,zhangguiyu/django-parler,defivelo/django-parler,ellmetha/django-parler,HiddenData/django-parler,skirsdeda/django-parler,ellmetha/django-parler
python
## Code Before: from django.contrib import admin from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget from parler.admin import TranslatableAdmin from .models import Article from parler.forms import TranslatableModelForm, TranslatedField class ArticleAdminForm(TranslatableModelForm): """ Example form Translated fields can be enhanced by manually declaring them: """ title = TranslatedField(widget=AdminTextInputWidget) content = TranslatedField(widget=AdminTextareaWidget) class ArticleAdmin(TranslatableAdmin): """ Example admin. Using an empty class would already work, but this example shows some additional options. """ # The 'language_column' is provided by the base class: list_display = ('title', 'language_column') # Example custom form usage. form = ArticleAdminForm # NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets= fieldsets = ( (None, { 'fields': ('title', 'slug', 'published'), }), ("Contents", { 'fields': ('content',), }) ) def get_prepopulated_fields(self, request, obj=None): # Can't use prepopulated_fields= yet, but this is a workaround. return {'slug': ('title',)} admin.site.register(Article, ArticleAdmin) ## Instruction: Add list_filter to example ArticleAdmin When using a list filter and then adding or editing an object the language GET parameter goes missing causing the wrong translation to be edited. ## Code After: from django.contrib import admin from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget from parler.admin import TranslatableAdmin from .models import Article from parler.forms import TranslatableModelForm, TranslatedField class ArticleAdminForm(TranslatableModelForm): """ Example form Translated fields can be enhanced by manually declaring them: """ title = TranslatedField(widget=AdminTextInputWidget) content = TranslatedField(widget=AdminTextareaWidget) class ArticleAdmin(TranslatableAdmin): """ Example admin. Using an empty class would already work, but this example shows some additional options. """ # The 'language_column' is provided by the base class: list_display = ('title', 'language_column') list_filter = ('published',) # Example custom form usage. form = ArticleAdminForm # NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets= fieldsets = ( (None, { 'fields': ('title', 'slug', 'published'), }), ("Contents", { 'fields': ('content',), }) ) def get_prepopulated_fields(self, request, obj=None): # Can't use prepopulated_fields= yet, but this is a workaround. return {'slug': ('title',)} admin.site.register(Article, ArticleAdmin)
// ... existing code ... # The 'language_column' is provided by the base class: list_display = ('title', 'language_column') list_filter = ('published',) # Example custom form usage. form = ArticleAdminForm // ... rest of the code ...
fe0a1a818d8590065d0f3e48b8b22644f6206a79
app/src/main/java/com/aviras/mrassistant/models/Order.java
app/src/main/java/com/aviras/mrassistant/models/Order.java
package com.aviras.mrassistant.models; import java.util.ArrayList; import java.util.List; import io.realm.annotations.PrimaryKey; /** * Represent order * <p/> * Created by ashish on 8/6/16. */ public class Order { @PrimaryKey private int id; private Doctor doctor; private List<Medicine> medicines = new ArrayList<>(); private long createdDate; private long expectedDeliveryDate; private long actualDeliveryDate; }
package com.aviras.mrassistant.models; import java.util.ArrayList; import java.util.List; import io.realm.annotations.PrimaryKey; /** * Represent order * <p/> * Created by ashish on 8/6/16. */ public class Order { @PrimaryKey private int id; private Doctor doctor; private List<OrderItem> items = new ArrayList<>(); private long createdDate; private long expectedDeliveryDate; private long actualDeliveryDate; public int getId() { return id; } public void setId(int id) { this.id = id; } public Doctor getDoctor() { return doctor; } public void setDoctor(Doctor doctor) { this.doctor = doctor; } public List<OrderItem> getItems() { return items; } public void setItems(List<OrderItem> items) { this.items = items; } public long getCreatedDate() { return createdDate; } public void setCreatedDate(long createdDate) { this.createdDate = createdDate; } public long getExpectedDeliveryDate() { return expectedDeliveryDate; } public void setExpectedDeliveryDate(long expectedDeliveryDate) { this.expectedDeliveryDate = expectedDeliveryDate; } public long getActualDeliveryDate() { return actualDeliveryDate; } public void setActualDeliveryDate(long actualDeliveryDate) { this.actualDeliveryDate = actualDeliveryDate; } }
Correct the definition of order
Correct the definition of order
Java
apache-2.0
pathakashish/MR-Assistant
java
## Code Before: package com.aviras.mrassistant.models; import java.util.ArrayList; import java.util.List; import io.realm.annotations.PrimaryKey; /** * Represent order * <p/> * Created by ashish on 8/6/16. */ public class Order { @PrimaryKey private int id; private Doctor doctor; private List<Medicine> medicines = new ArrayList<>(); private long createdDate; private long expectedDeliveryDate; private long actualDeliveryDate; } ## Instruction: Correct the definition of order ## Code After: package com.aviras.mrassistant.models; import java.util.ArrayList; import java.util.List; import io.realm.annotations.PrimaryKey; /** * Represent order * <p/> * Created by ashish on 8/6/16. */ public class Order { @PrimaryKey private int id; private Doctor doctor; private List<OrderItem> items = new ArrayList<>(); private long createdDate; private long expectedDeliveryDate; private long actualDeliveryDate; public int getId() { return id; } public void setId(int id) { this.id = id; } public Doctor getDoctor() { return doctor; } public void setDoctor(Doctor doctor) { this.doctor = doctor; } public List<OrderItem> getItems() { return items; } public void setItems(List<OrderItem> items) { this.items = items; } public long getCreatedDate() { return createdDate; } public void setCreatedDate(long createdDate) { this.createdDate = createdDate; } public long getExpectedDeliveryDate() { return expectedDeliveryDate; } public void setExpectedDeliveryDate(long expectedDeliveryDate) { this.expectedDeliveryDate = expectedDeliveryDate; } public long getActualDeliveryDate() { return actualDeliveryDate; } public void setActualDeliveryDate(long actualDeliveryDate) { this.actualDeliveryDate = actualDeliveryDate; } }
// ... existing code ... private Doctor doctor; private List<OrderItem> items = new ArrayList<>(); private long createdDate; private long expectedDeliveryDate; private long actualDeliveryDate; public int getId() { return id; } public void setId(int id) { this.id = id; } public Doctor getDoctor() { return doctor; } public void setDoctor(Doctor doctor) { this.doctor = doctor; } public List<OrderItem> getItems() { return items; } public void setItems(List<OrderItem> items) { this.items = items; } public long getCreatedDate() { return createdDate; } public void setCreatedDate(long createdDate) { this.createdDate = createdDate; } public long getExpectedDeliveryDate() { return expectedDeliveryDate; } public void setExpectedDeliveryDate(long expectedDeliveryDate) { this.expectedDeliveryDate = expectedDeliveryDate; } public long getActualDeliveryDate() { return actualDeliveryDate; } public void setActualDeliveryDate(long actualDeliveryDate) { this.actualDeliveryDate = actualDeliveryDate; } } // ... rest of the code ...
e5f00a6a5e71d8f5fe98547732f4c9e15a3efc1e
src/nodeconductor_paas_oracle/apps.py
src/nodeconductor_paas_oracle/apps.py
from django.apps import AppConfig class OracleConfig(AppConfig): name = 'nodeconductor_paas_oracle' verbose_name = 'Oracle' service_name = 'Oracle' def ready(self): from nodeconductor.structure import SupportedServices from .backend import OracleBackend SupportedServices.register_backend(OracleBackend)
from django.apps import AppConfig class OracleConfig(AppConfig): name = 'nodeconductor_paas_oracle' verbose_name = 'Oracle' service_name = 'Oracle' def ready(self): from nodeconductor.structure import SupportedServices from nodeconductor.cost_tracking import CostTrackingRegister from .backend import OracleBackend SupportedServices.register_backend(OracleBackend) # cost tracking from .cost_tracking import OracleCostTrackingBackend CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
Add registration to cost tracking
Add registration to cost tracking
Python
mit
opennode/nodeconductor-paas-oracle
python
## Code Before: from django.apps import AppConfig class OracleConfig(AppConfig): name = 'nodeconductor_paas_oracle' verbose_name = 'Oracle' service_name = 'Oracle' def ready(self): from nodeconductor.structure import SupportedServices from .backend import OracleBackend SupportedServices.register_backend(OracleBackend) ## Instruction: Add registration to cost tracking ## Code After: from django.apps import AppConfig class OracleConfig(AppConfig): name = 'nodeconductor_paas_oracle' verbose_name = 'Oracle' service_name = 'Oracle' def ready(self): from nodeconductor.structure import SupportedServices from nodeconductor.cost_tracking import CostTrackingRegister from .backend import OracleBackend SupportedServices.register_backend(OracleBackend) # cost tracking from .cost_tracking import OracleCostTrackingBackend CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
# ... existing code ... def ready(self): from nodeconductor.structure import SupportedServices from nodeconductor.cost_tracking import CostTrackingRegister from .backend import OracleBackend SupportedServices.register_backend(OracleBackend) # cost tracking from .cost_tracking import OracleCostTrackingBackend CostTrackingRegister.register(self.label, OracleCostTrackingBackend) # ... rest of the code ...
49ac4dc3e7506f35d2f3ad695afaf9c89f08720b
setup.py
setup.py
import os from setuptools import setup base_dir = os.path.dirname(__file__) with open(os.path.join(base_dir, "README.rst")) as f: long_description = f.read() setup( name="TxSNI", description="easy-to-use SNI endpoint for twisted", packages=[ "txsni", "twisted.plugins", ], install_requires=[ "Twisted[tls]>=14.0", "pyOpenSSL>=0.14", ], version="0.1.6", long_description=long_description, license="MIT", url="https://github.com/glyph/txsni", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography", ], )
import os from setuptools import setup base_dir = os.path.dirname(__file__) with open(os.path.join(base_dir, "README.rst")) as f: long_description = f.read() setup( name="TxSNI", description="easy-to-use SNI endpoint for twisted", packages=[ "txsni", "txsni.test", "txsni.test.certs", "twisted.plugins", ], install_requires=[ "Twisted[tls]>=14.0", "pyOpenSSL>=0.14", ], version="0.1.6", long_description=long_description, license="MIT", url="https://github.com/glyph/txsni", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography", ], )
Install the tests and test utilities
Install the tests and test utilities
Python
mit
glyph/txsni
python
## Code Before: import os from setuptools import setup base_dir = os.path.dirname(__file__) with open(os.path.join(base_dir, "README.rst")) as f: long_description = f.read() setup( name="TxSNI", description="easy-to-use SNI endpoint for twisted", packages=[ "txsni", "twisted.plugins", ], install_requires=[ "Twisted[tls]>=14.0", "pyOpenSSL>=0.14", ], version="0.1.6", long_description=long_description, license="MIT", url="https://github.com/glyph/txsni", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography", ], ) ## Instruction: Install the tests and test utilities ## Code After: import os from setuptools import setup base_dir = os.path.dirname(__file__) with open(os.path.join(base_dir, "README.rst")) as f: long_description = f.read() setup( name="TxSNI", description="easy-to-use SNI endpoint for twisted", packages=[ "txsni", "txsni.test", "txsni.test.certs", "twisted.plugins", ], install_requires=[ "Twisted[tls]>=14.0", "pyOpenSSL>=0.14", ], version="0.1.6", long_description=long_description, license="MIT", url="https://github.com/glyph/txsni", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography", ], )
... description="easy-to-use SNI endpoint for twisted", packages=[ "txsni", "txsni.test", "txsni.test.certs", "twisted.plugins", ], install_requires=[ ...
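Editor's aside, not part of the record above: an alternative to listing each new test package by hand is to let setuptools discover them. This is only a sketch of what the setup call could use, not what the project does, and it assumes every txsni.test directory ships an __init__.py; twisted.plugins still has to be named explicitly because it lives outside the txsni tree:

from setuptools import find_packages

packages = find_packages(include=['txsni', 'txsni.*']) + ['twisted.plugins']
# expected to resolve to ['txsni', 'txsni.test', 'txsni.test.certs', 'twisted.plugins']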
fe5edfe737a774aa86cce578321fbb7fb4c8795e
tagcache/utils.py
tagcache/utils.py
import os import errno def ensure_intermediate_dir(path): """ Basiclly equivalent to command `mkdir -p` """ try: os.makedirs(os.path.dirname(path)) except OSError, e: if e.errno != errno.EEXIST: raise e def open_file(filename, flag, mode=0777): """ Wrapper of `os.open` which ensure intermediate dirs are created as well. """ try: return os.open(filename, flag, mode=mode) except OSError, e: if e.errno != errno.ENOENT or not (flag & os.O_CREAT): raise e # a directory component not exists ensure_intermediate_dir(filename) # second try return os.open(filename, flag, mode=mode) def link_file(src, dst): """ Wrapper of `os.link` which ensure intermediate dirs are created as well. """ try: return os.link(src, dst) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(dst) return os.link(src, dst) def rename_file(old, new): """ Wrapper of `os.rename` which ensure intermediate dirs are created as well. """ try: return os.rename(old, new) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(new) return os.rename(old, new)
import os import errno def ensure_intermediate_dir(path): """ Basiclly equivalent to command `mkdir -p` """ try: os.makedirs(os.path.dirname(path)) except OSError, e: if e.errno != errno.EEXIST: raise e def open_file(filename, flag, mode=0777): """ Wrapper of `os.open` which ensure intermediate dirs are created as well. """ try: return os.open(filename, flag, mode) except OSError, e: if e.errno != errno.ENOENT or not (flag & os.O_CREAT): raise e # a directory component not exists ensure_intermediate_dir(filename) # second try return os.open(filename, flag, mode) def link_file(src, dst): """ Wrapper of `os.link` which ensure intermediate dirs are created as well. """ try: return os.link(src, dst) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(dst) return os.link(src, dst) def rename_file(old, new): """ Wrapper of `os.rename` which ensure intermediate dirs are created as well. """ try: return os.rename(old, new) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(new) return os.rename(old, new)
Fix a bug in open_file (os.open does not take keyword arguments).
Fix a bug in open_file (os.open does not take keyword arguments).
Python
mit
huangjunwen/tagcache
python
## Code Before: import os import errno def ensure_intermediate_dir(path): """ Basiclly equivalent to command `mkdir -p` """ try: os.makedirs(os.path.dirname(path)) except OSError, e: if e.errno != errno.EEXIST: raise e def open_file(filename, flag, mode=0777): """ Wrapper of `os.open` which ensure intermediate dirs are created as well. """ try: return os.open(filename, flag, mode=mode) except OSError, e: if e.errno != errno.ENOENT or not (flag & os.O_CREAT): raise e # a directory component not exists ensure_intermediate_dir(filename) # second try return os.open(filename, flag, mode=mode) def link_file(src, dst): """ Wrapper of `os.link` which ensure intermediate dirs are created as well. """ try: return os.link(src, dst) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(dst) return os.link(src, dst) def rename_file(old, new): """ Wrapper of `os.rename` which ensure intermediate dirs are created as well. """ try: return os.rename(old, new) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(new) return os.rename(old, new) ## Instruction: Fix a bug in file_open (os.open does not take keyword argument). ## Code After: import os import errno def ensure_intermediate_dir(path): """ Basiclly equivalent to command `mkdir -p` """ try: os.makedirs(os.path.dirname(path)) except OSError, e: if e.errno != errno.EEXIST: raise e def open_file(filename, flag, mode=0777): """ Wrapper of `os.open` which ensure intermediate dirs are created as well. """ try: return os.open(filename, flag, mode) except OSError, e: if e.errno != errno.ENOENT or not (flag & os.O_CREAT): raise e # a directory component not exists ensure_intermediate_dir(filename) # second try return os.open(filename, flag, mode) def link_file(src, dst): """ Wrapper of `os.link` which ensure intermediate dirs are created as well. """ try: return os.link(src, dst) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(dst) return os.link(src, dst) def rename_file(old, new): """ Wrapper of `os.rename` which ensure intermediate dirs are created as well. """ try: return os.rename(old, new) except OSError, e: if e.errno != errno.ENOENT: raise e ensure_intermediate_dir(new) return os.rename(old, new)
... """ try: return os.open(filename, flag, mode) except OSError, e: ... ensure_intermediate_dir(filename) # second try return os.open(filename, flag, mode) def link_file(src, dst): ...
421e811242b737a7b1bf27814d70f719f345131b
watchlist/utils.py
watchlist/utils.py
from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS


def get_shift_weekview_rows():
    '''Returns a dictionary of shifts for each timeslot, for each weekday'''
    slots = ShiftSlot.objects.all()
    if not slots:
        return None

    # Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
    rows = {}
    for slot in slots:
        row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
        if row_header not in rows:
            rows[row_header] = []
        rows[row_header].append(slot)

    # Sort each list in the dict by weekday
    for time in rows.keys():
        rows[time].sort(key=lambda slot: slot.weekday)

    return rows


def get_shift_weekview_columns():
    '''Returns a list of weekday name column headers to populate a weekview table with'''
    slots = ShiftSlot.objects.all()
    if not slots:
        return None

    cols = []
    for slot in slots:
        col_header = slot.get_weekday_name()
        if col_header not in cols:
            cols.append(col_header)

    return cols
from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS


def get_shift_weekview_rows():
    '''Returns a dictionary of shifts for each timeslot, for each weekday'''
    slots = ShiftSlot.objects.all()
    if not slots:
        return None

    # Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
    rows = {}
    for slot in slots:
        row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
        if row_header not in rows:
            rows[row_header] = []
        rows[row_header].append(slot)

    # Sort each list in the dict by weekday
    for time in rows.keys():
        rows[time].sort(key=lambda slot: slot.weekday)

    return rows


def get_shift_weekview_columns():
    '''Returns a list of weekday name column headers to populate a weekview table with'''
    slots = ShiftSlot.objects.all().order_by('weekday')
    if not slots:
        return None

    cols = []
    for slot in slots:
        col_header = slot.get_weekday_name()
        if col_header not in cols:
            cols.append(col_header)

    return cols
Order weekday columns in watchlist
Order weekday columns in watchlist
Python
mit
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
python
## Code Before: from .models import ShiftSlot, weekday_loc from website.settings import WORKSHOP_OPEN_DAYS def get_shift_weekview_rows(): '''Returns a dictionary of shifts for each timeslot, for each weekday''' slots = ShiftSlot.objects.all() if not slots: return None # Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is! rows = {} for slot in slots: row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M')) if row_header not in rows: rows[row_header] = [] rows[row_header].append(slot) # Sort each list in the dict by weekday for time in rows.keys(): rows[time].sort(key=lambda slot: slot.weekday) return rows def get_shift_weekview_columns(): '''Returns a list of weekday name column headers to populate a weekview table with''' slots = ShiftSlot.objects.all() if not slots: return None cols = [] for slot in slots: col_header = slot.get_weekday_name() if col_header not in cols: cols.append(col_header) return cols ## Instruction: Order weekday columns in watchlist ## Code After: from .models import ShiftSlot, weekday_loc from website.settings import WORKSHOP_OPEN_DAYS def get_shift_weekview_rows(): '''Returns a dictionary of shifts for each timeslot, for each weekday''' slots = ShiftSlot.objects.all() if not slots: return None # Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is! rows = {} for slot in slots: row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M')) if row_header not in rows: rows[row_header] = [] rows[row_header].append(slot) # Sort each list in the dict by weekday for time in rows.keys(): rows[time].sort(key=lambda slot: slot.weekday) return rows def get_shift_weekview_columns(): '''Returns a list of weekday name column headers to populate a weekview table with''' slots = ShiftSlot.objects.all().order_by('weekday') if not slots: return None cols = [] for slot in slots: col_header = slot.get_weekday_name() if col_header not in cols: cols.append(col_header) return cols
... def get_shift_weekview_columns(): '''Returns a list of weekday name column headers to populate a weekview table with''' slots = ShiftSlot.objects.all().order_by('weekday') if not slots: return None ...
cfc3fddff9f6d54d14e0d8f109048bd28e2ac112
fake-koji/src/main/java/org/fakekoji/Utils.java
fake-koji/src/main/java/org/fakekoji/Utils.java
package org.fakekoji;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.nio.file.Path;

public class Utils {

    private static final ClassLoader classLoader = Utils.class.getClassLoader();

    public static String readResource(Path path) throws IOException {
        return readFile(classLoader.getResource(path.toString()));
    }

    public static String readFile(File file) throws IOException {
        final StringBuilder content = new StringBuilder();
        final FileReader fileReader = new FileReader(file);
        final BufferedReader bufferedReader = new BufferedReader(fileReader);
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            content.append(line);
        }
        bufferedReader.close();
        fileReader.close();
        return content.toString();
    }

    public static String readFile(URL url) throws IOException {
        return readFile(new File(url.getFile()));
    }

    public static void writeToFile(Path path, String content) throws IOException {
        writeToFile(path.toFile(), content);
    }

    public static void writeToFile(File file, String content) throws IOException {
        final PrintWriter writer = new PrintWriter(file.getAbsolutePath());
        writer.write(content);
        writer.flush();
        writer.close();
    }
}
package org.fakekoji;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;

import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;

public class Utils {

    private static final ClassLoader classLoader = Utils.class.getClassLoader();

    public static String readResource(Path path) throws IOException {
        return readFile(classLoader.getResource(path.toString()));
    }

    public static String readFile(File file) throws IOException {
        final StringBuilder content = new StringBuilder();
        final FileReader fileReader = new FileReader(file);
        final BufferedReader bufferedReader = new BufferedReader(fileReader);
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            content.append(line);
        }
        bufferedReader.close();
        fileReader.close();
        return content.toString();
    }

    public static String readFile(URL url) throws IOException {
        return readFile(new File(url.getFile()));
    }

    public static void writeToFile(Path path, String content) throws IOException {
        writeToFile(path.toFile(), content);
    }

    public static void writeToFile(File file, String content) throws IOException {
        final PrintWriter writer = new PrintWriter(file.getAbsolutePath());
        writer.write(content);
        writer.flush();
        writer.close();
    }

    public static void moveFile(File source, File target) throws IOException {
        Files.move(source.toPath(), target.toPath(), REPLACE_EXISTING);
    }
}
Create function for moving files
Create function for moving files
Java
mit
TheIndifferent/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin,TheIndifferent/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin,judovana/jenkins-scm-koji-plugin
java
## Code Before: package org.fakekoji; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.nio.file.Path; public class Utils { private static final ClassLoader classLoader = Utils.class.getClassLoader(); public static String readResource(Path path) throws IOException { return readFile(classLoader.getResource(path.toString())); } public static String readFile(File file) throws IOException { final StringBuilder content = new StringBuilder(); final FileReader fileReader = new FileReader(file); final BufferedReader bufferedReader = new BufferedReader(fileReader); String line; while ((line = bufferedReader.readLine()) != null) { content.append(line); } bufferedReader.close(); fileReader.close(); return content.toString(); } public static String readFile(URL url) throws IOException { return readFile(new File(url.getFile())); } public static void writeToFile(Path path, String content) throws IOException { writeToFile(path.toFile(), content); } public static void writeToFile(File file, String content) throws IOException { final PrintWriter writer = new PrintWriter(file.getAbsolutePath()); writer.write(content); writer.flush(); writer.close(); } } ## Instruction: Create function for moving files ## Code After: package org.fakekoji; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; public class Utils { private static final ClassLoader classLoader = Utils.class.getClassLoader(); public static String readResource(Path path) throws IOException { return readFile(classLoader.getResource(path.toString())); } public static String readFile(File file) throws IOException { final StringBuilder content = new StringBuilder(); final FileReader fileReader = new FileReader(file); final BufferedReader bufferedReader = new BufferedReader(fileReader); String line; while ((line = bufferedReader.readLine()) != null) { content.append(line); } bufferedReader.close(); fileReader.close(); return content.toString(); } public static String readFile(URL url) throws IOException { return readFile(new File(url.getFile())); } public static void writeToFile(Path path, String content) throws IOException { writeToFile(path.toFile(), content); } public static void writeToFile(File file, String content) throws IOException { final PrintWriter writer = new PrintWriter(file.getAbsolutePath()); writer.write(content); writer.flush(); writer.close(); } public static void moveFile(File source, File target) throws IOException { Files.move(source.toPath(), target.toPath(), REPLACE_EXISTING); } }
... import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; public class Utils { ... writer.flush(); writer.close(); } public static void moveFile(File source, File target) throws IOException { Files.move(source.toPath(), target.toPath(), REPLACE_EXISTING); } } ...
1f2d8fadd114106cefbc23060f742163be415376
cherrypy/process/__init__.py
cherrypy/process/__init__.py
from cherrypy.process.wspbus import bus  # noqa
from cherrypy.process import plugins, servers  # noqa
from .wspbus import bus
from . import plugins, servers

__all__ = ('bus', 'plugins', 'servers')
Use __all__ to avoid linter errors
Use __all__ to avoid linter errors
Python
bsd-3-clause
Safihre/cherrypy,Safihre/cherrypy,cherrypy/cherrypy,cherrypy/cherrypy
python
## Code Before: from cherrypy.process.wspbus import bus # noqa from cherrypy.process import plugins, servers # noqa ## Instruction: Use __all__ to avoid linter errors ## Code After: from .wspbus import bus from . import plugins, servers __all__ = ('bus', 'plugins', 'servers')
... from .wspbus import bus from . import plugins, servers __all__ = ('bus', 'plugins', 'servers') ...
fef28556bc4d105feb44345782c632b8d3befa3f
server/acre/settings/dev.py
server/acre/settings/dev.py
from .base import *

import os

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['RDS_DB_NAME'],
        'HOST': os.environ['RDS_HOSTNAME'],
        'PORT': os.environ['RDS_PORT'],
        'USER': os.environ['RDS_USERNAME'],
        'PASSWORD': os.environ['RDS_PASSWORD'],
    }
}

ALLOWED_HOSTS = [".us-east-2.elasticbeanstalk.com", "localhost"]
from .base import *

import os

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['RDS_DB_NAME'],
        'HOST': os.environ['RDS_HOSTNAME'],
        'PORT': os.environ['RDS_PORT'],
        'USER': os.environ['RDS_USERNAME'],
        'PASSWORD': os.environ['RDS_PASSWORD'],
    }
}

ALLOWED_HOSTS = [".acre.one", ".us-east-2.elasticbeanstalk.com", "localhost"]
Add acre.one to allowed host
Add acre.one to allowed host
Python
mit
yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre
python
## Code Before: from .base import * import os DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': os.environ['RDS_DB_NAME'], 'HOST': os.environ['RDS_HOSTNAME'], 'PORT': os.environ['RDS_PORT'], 'USER': os.environ['RDS_USERNAME'], 'PASSWORD': os.environ['RDS_PASSWORD'], } } ALLOWED_HOSTS = [".us-east-2.elasticbeanstalk.com", "localhost"] ## Instruction: Add acre.one to allowed host ## Code After: from .base import * import os DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': os.environ['RDS_DB_NAME'], 'HOST': os.environ['RDS_HOSTNAME'], 'PORT': os.environ['RDS_PORT'], 'USER': os.environ['RDS_USERNAME'], 'PASSWORD': os.environ['RDS_PASSWORD'], } } ALLOWED_HOSTS = [".acre.one", ".us-east-2.elasticbeanstalk.com", "localhost"]
# ... existing code ... } } ALLOWED_HOSTS = [".acre.one", ".us-east-2.elasticbeanstalk.com", "localhost"] # ... rest of the code ...
7895b0a39694e88ed1bdd425c69fb747b7531c59
indico/testing/mocks.py
indico/testing/mocks.py
class MockConferenceHolder:
    # This class is monkeypatched on top of the real conferenceholder
    _events = {}

    def __init__(self):
        pass

    @classmethod
    def add(cls, event):
        if event.id in cls._events:
            __tracebackhide__ = True
            raise Exception("Event '{}' already exists".format(event.id))
        cls._events[event.id] = event

    @classmethod
    def remove(cls, event):
        del cls._events[event.id]

    @classmethod
    def getById(cls, id_):
        return cls._events.get(id_)


class MockConference(object):
    def __repr__(self):
        return '<MockConference({})>'.format(self.id)

    def getId(self):
        return self.id
class MockConferenceHolder:
    # This class is monkeypatched on top of the real conferenceholder
    _events = {}

    def __init__(self):
        pass

    @classmethod
    def add(cls, event):
        if event.id in cls._events:
            __tracebackhide__ = True
            raise Exception("Event '{}' already exists".format(event.id))
        cls._events[int(event.id)] = event

    @classmethod
    def remove(cls, event):
        del cls._events[int(event.id)]

    @classmethod
    def getById(cls, id_, quiet=None):
        return cls._events.get(int(id_))


class MockConference(object):
    def __repr__(self):
        return '<MockConference({})>'.format(self.id)

    def getId(self):
        return self.id
Fix str/int usage in MockConferenceHolder
Fix str/int usage in MockConferenceHolder
Python
mit
indico/indico,ThiefMaster/indico,indico/indico,OmeGak/indico,ThiefMaster/indico,OmeGak/indico,mic4ael/indico,DirkHoffmann/indico,mvidalgarcia/indico,mvidalgarcia/indico,OmeGak/indico,pferreir/indico,ThiefMaster/indico,mic4ael/indico,pferreir/indico,DirkHoffmann/indico,ThiefMaster/indico,OmeGak/indico,mic4ael/indico,indico/indico,mvidalgarcia/indico,pferreir/indico,mic4ael/indico,pferreir/indico,mvidalgarcia/indico,DirkHoffmann/indico,DirkHoffmann/indico,indico/indico
python
## Code Before: class MockConferenceHolder: # This class is monkeypatched on top of the real conferenceholder _events = {} def __init__(self): pass @classmethod def add(cls, event): if event.id in cls._events: __tracebackhide__ = True raise Exception("Event '{}' already exists".format(event.id)) cls._events[event.id] = event @classmethod def remove(cls, event): del cls._events[event.id] @classmethod def getById(cls, id_): return cls._events.get(id_) class MockConference(object): def __repr__(self): return '<MockConference({})>'.format(self.id) def getId(self): return self.id ## Instruction: Fix str/int usage in MockConferenceHolder ## Code After: class MockConferenceHolder: # This class is monkeypatched on top of the real conferenceholder _events = {} def __init__(self): pass @classmethod def add(cls, event): if event.id in cls._events: __tracebackhide__ = True raise Exception("Event '{}' already exists".format(event.id)) cls._events[int(event.id)] = event @classmethod def remove(cls, event): del cls._events[int(event.id)] @classmethod def getById(cls, id_, quiet=None): return cls._events.get(int(id_)) class MockConference(object): def __repr__(self): return '<MockConference({})>'.format(self.id) def getId(self): return self.id
... if event.id in cls._events: __tracebackhide__ = True raise Exception("Event '{}' already exists".format(event.id)) cls._events[int(event.id)] = event @classmethod def remove(cls, event): del cls._events[int(event.id)] @classmethod def getById(cls, id_, quiet=None): return cls._events.get(int(id_)) class MockConference(object): ...
7740ff36679b13be9d63b333cff35f913e0066dc
python/tests/py3/test_asyncio.py
python/tests/py3/test_asyncio.py
import asyncio

import pytest


def test_hello_world(workspace):
    workspace.src('main.py', r"""
import asyncio

async def main():
    print('Hello, ', end='')
    await asyncio.sleep(1)
    print('World!')

# Python 3.7+
asyncio.run(main())
""")

    r = workspace.run('python main.py')
    assert r.out == 'Hello, World!'


@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
    result = await async_task('World')
    assert result == 'Hello, World!'


async def async_task(input):
    await asyncio.sleep(1)
    return 'Hello, %s!' % input


def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
    workspace.src('main.py', r"""
async def main():
    pass

print(type(main()))
""")

    r = workspace.run('python main.py')
    assert r.out == "<class 'coroutine'>"
    assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
import asyncio

import pytest


def test_hello_world(workspace):
    workspace.src('main.py', r"""
import asyncio

async def do_something_else():
    print('...', end='')
    await asyncio.sleep(1)
    print('!', end='')

async def say_hello_async(who):
    print('Hello, ', end='')
    await asyncio.sleep(1)
    print(who, end='')

async def main():
    await asyncio.gather(say_hello_async('World'), do_something_else())

asyncio.run(main())
""")

    r = workspace.run('python main.py')
    assert r.out == 'Hello, ...World!'


@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
    async def do_something_else():
        print('...', end='')
        await asyncio.sleep(1)
        print('!', end='')

    async def say_hello_async(who):
        print('Hello, ', end='')
        await asyncio.sleep(1)
        print(who, end='')

    await asyncio.gather(say_hello_async('World'), do_something_else())

    out, _ = capsys.readouterr()
    assert out == 'Hello, ...World!'


def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
    workspace.src('main.py', r"""
async def main():
    pass

print(type(main()))
""")

    r = workspace.run('python main.py')
    assert r.out == "<class 'coroutine'>"
    assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
Make hello world (asyncio) more involved
[python] Make hello world (asyncio) more involved
Python
mit
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
python
## Code Before: import asyncio import pytest def test_hello_world(workspace): workspace.src('main.py', r""" import asyncio async def main(): print('Hello, ', end='') await asyncio.sleep(1) print('World!') # Python 3.7+ asyncio.run(main()) """) r = workspace.run('python main.py') assert r.out == 'Hello, World!' @pytest.mark.asyncio async def test_hello_world__pytest_asyncio(workspace): result = await async_task('World') assert result == 'Hello, World!' async def async_task(input): await asyncio.sleep(1) return 'Hello, %s!' % input def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace): workspace.src('main.py', r""" async def main(): pass print(type(main())) """) r = workspace.run('python main.py') assert r.out == "<class 'coroutine'>" assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))" ## Instruction: [python] Make hello world (asyncio) more involved ## Code After: import asyncio import pytest def test_hello_world(workspace): workspace.src('main.py', r""" import asyncio async def do_something_else(): print('...', end='') await asyncio.sleep(1) print('!', end='') async def say_hello_async(who): print('Hello, ', end='') await asyncio.sleep(1) print(who, end='') async def main(): await asyncio.gather(say_hello_async('World'), do_something_else()) asyncio.run(main()) """) r = workspace.run('python main.py') assert r.out == 'Hello, ...World!' @pytest.mark.asyncio async def test_hello_world__pytest_asyncio(capsys): async def do_something_else(): print('...', end='') await asyncio.sleep(1) print('!', end='') async def say_hello_async(who): print('Hello, ', end='') await asyncio.sleep(1) print(who, end='') await asyncio.gather(say_hello_async('World'), do_something_else()) out, _ = capsys.readouterr() assert out == 'Hello, ...World!' def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace): workspace.src('main.py', r""" async def main(): pass print(type(main())) """) r = workspace.run('python main.py') assert r.out == "<class 'coroutine'>" assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
... workspace.src('main.py', r""" import asyncio async def do_something_else(): print('...', end='') await asyncio.sleep(1) print('!', end='') async def say_hello_async(who): print('Hello, ', end='') await asyncio.sleep(1) print(who, end='') async def main(): await asyncio.gather(say_hello_async('World'), do_something_else()) asyncio.run(main()) """) r = workspace.run('python main.py') assert r.out == 'Hello, ...World!' @pytest.mark.asyncio async def test_hello_world__pytest_asyncio(capsys): async def do_something_else(): print('...', end='') await asyncio.sleep(1) print('!', end='') async def say_hello_async(who): print('Hello, ', end='') await asyncio.sleep(1) print(who, end='') await asyncio.gather(say_hello_async('World'), do_something_else()) out, _ = capsys.readouterr() assert out == 'Hello, ...World!' def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace): workspace.src('main.py', r""" ...
2d379a3bd04d2b687c719cb9ccca5f289b434d00
plenum/server/i3pc_watchers.py
plenum/server/i3pc_watchers.py
from typing import Callable, Iterable

from plenum.server.quorums import Quorums


class NetworkI3PCWatcher:
    def __init__(self, cb: Callable):
        self.nodes = set()
        self.connected = set()
        self.callback = cb
        self.quorums = Quorums(0)

    def connect(self, name: str):
        self.connected.add(name)

    def disconnect(self, name: str):
        had_consensus = self._has_consensus()
        self.connected.discard(name)
        if had_consensus and not self._has_consensus():
            self.callback()

    def set_nodes(self, nodes: Iterable[str]):
        self.nodes = set(nodes)
        self.quorums = Quorums(len(self.nodes))

    def _has_consensus(self):
        return self.quorums.weak.is_reached(len(self.connected))
from typing import Callable, Iterable

from plenum.server.quorums import Quorums


class NetworkI3PCWatcher:
    def __init__(self, cb: Callable):
        self._nodes = set()
        self.connected = set()
        self.callback = cb
        self.quorums = Quorums(0)

    def connect(self, name: str):
        self.connected.add(name)

    def disconnect(self, name: str):
        had_consensus = self._has_consensus()
        self.connected.discard(name)
        if had_consensus and not self._has_consensus():
            self.callback()

    @property
    def nodes(self):
        return self._nodes

    def set_nodes(self, nodes: Iterable[str]):
        self._nodes = set(nodes)
        self.quorums = Quorums(len(self._nodes))

    def _has_consensus(self):
        return self.quorums.weak.is_reached(len(self.connected))
Make interface of NetworkI3PCWatcher more clear
INDY-1199: Make interface of NetworkI3PCWatcher more clear Signed-off-by: Sergey Khoroshavin <[email protected]>
Python
apache-2.0
evernym/plenum,evernym/zeno
python
## Code Before: from typing import Callable, Iterable from plenum.server.quorums import Quorums class NetworkI3PCWatcher: def __init__(self, cb: Callable): self.nodes = set() self.connected = set() self.callback = cb self.quorums = Quorums(0) def connect(self, name: str): self.connected.add(name) def disconnect(self, name: str): had_consensus = self._has_consensus() self.connected.discard(name) if had_consensus and not self._has_consensus(): self.callback() def set_nodes(self, nodes: Iterable[str]): self.nodes = set(nodes) self.quorums = Quorums(len(self.nodes)) def _has_consensus(self): return self.quorums.weak.is_reached(len(self.connected)) ## Instruction: INDY-1199: Make interface of NetworkI3PCWatcher more clear Signed-off-by: Sergey Khoroshavin <[email protected]> ## Code After: from typing import Callable, Iterable from plenum.server.quorums import Quorums class NetworkI3PCWatcher: def __init__(self, cb: Callable): self._nodes = set() self.connected = set() self.callback = cb self.quorums = Quorums(0) def connect(self, name: str): self.connected.add(name) def disconnect(self, name: str): had_consensus = self._has_consensus() self.connected.discard(name) if had_consensus and not self._has_consensus(): self.callback() @property def nodes(self): return self._nodes def set_nodes(self, nodes: Iterable[str]): self._nodes = set(nodes) self.quorums = Quorums(len(self._nodes)) def _has_consensus(self): return self.quorums.weak.is_reached(len(self.connected))
... class NetworkI3PCWatcher: def __init__(self, cb: Callable): self._nodes = set() self.connected = set() self.callback = cb self.quorums = Quorums(0) ... if had_consensus and not self._has_consensus(): self.callback() @property def nodes(self): return self._nodes def set_nodes(self, nodes: Iterable[str]): self._nodes = set(nodes) self.quorums = Quorums(len(self._nodes)) def _has_consensus(self): return self.quorums.weak.is_reached(len(self.connected)) ...
d9b05e6ae11c5f17590d493ef7bff52239bed3bb
test2/type_propagation/expr_if_noreturn.c
test2/type_propagation/expr_if_noreturn.c
// RUN: %ocheck 0 %s

_Noreturn void exit(int);

void g(int i)
{
}

int f(int p)
{
	(p == 5 ? exit : g)(2); // this shouldn't be thought of as unreachable
	return 7;
}

main()
{
	f(4);
	return 0;
}
// RUN: %ucc -fsyntax-only %s

_Noreturn void exit(int);
__attribute((noreturn)) void exit2(int);

void g(int i);

_Static_assert( !__builtin_has_attribute(g, noreturn), "");
_Static_assert( __builtin_has_attribute(exit, noreturn), "");
_Static_assert( __builtin_has_attribute(exit2, noreturn), "");

_Static_assert( !__builtin_has_attribute( (1 ? exit : g), noreturn), "");
_Static_assert( !__builtin_has_attribute( (1 ? exit2 : g), noreturn), "");
Change noreturn attribute tests into static asserts
Change noreturn attribute tests into static asserts
C
mit
bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler
c
## Code Before: // RUN: %ocheck 0 %s _Noreturn void exit(int); void g(int i) { } int f(int p) { (p == 5 ? exit : g)(2); // this shouldn't be thought of as unreachable return 7; } main() { f(4); return 0; } ## Instruction: Change noreturn attribute tests into static asserts ## Code After: // RUN: %ucc -fsyntax-only %s _Noreturn void exit(int); __attribute((noreturn)) void exit2(int); void g(int i); _Static_assert( !__builtin_has_attribute(g, noreturn), ""); _Static_assert( __builtin_has_attribute(exit, noreturn), ""); _Static_assert( __builtin_has_attribute(exit2, noreturn), ""); _Static_assert( !__builtin_has_attribute( (1 ? exit : g), noreturn), ""); _Static_assert( !__builtin_has_attribute( (1 ? exit2 : g), noreturn), "");
# ... existing code ... // RUN: %ucc -fsyntax-only %s _Noreturn void exit(int); __attribute((noreturn)) void exit2(int); void g(int i); _Static_assert( !__builtin_has_attribute(g, noreturn), ""); _Static_assert( __builtin_has_attribute(exit, noreturn), ""); _Static_assert( __builtin_has_attribute(exit2, noreturn), ""); _Static_assert( !__builtin_has_attribute( (1 ? exit : g), noreturn), ""); _Static_assert( !__builtin_has_attribute( (1 ? exit2 : g), noreturn), ""); # ... rest of the code ...
81004ff9605d8bd19c50ca65d69970283c462a83
BloodBank_JAVA/src/persons/Donor.java
BloodBank_JAVA/src/persons/Donor.java
package persons;

/**
 * Created by fcmam5 on 11/13/15.
 * Implemented by Mouilah-Sarra
 */
public class Donor extends Personne {

    private static int donNum = 0;
    private int quantite;
    boolean motivation;

    public Donor(String name, String firstName, int age, String grouping, String[] adresse, int quantite, boolean motivation) {
        super(name, firstName, age, grouping, adresse);
        this.donNum++;
        this.quantite = quantite;
        this.motivation = motivation;
    }

    public void setGroupage (String gropage){
        setGrouping(gropage);
    }

    public String getGroupage(){
        return getGrouping();
    }

    public int getDonNum( ){
        return this.donNum;
    }

    // HOw much he gives ?
    public void setQuantite (int quantite){
        this.quantite=quantite;
    }

    public int getQuantite (){
        return this.quantite;
    }
}
package persons;

import java.util.Objects;

/**
 * Created by fcmam5 on 11/13/15.
 * Implemented by Mouilah-Sarra
 */
public class Donor extends Personne {

    private static int donNum = 0;
    private int quantite;
    private boolean motivation;
    private Object donorCard[] = new Object[3]; // [[int DonorKey]|[String email]|[int telNumber]]

    public Donor(String name, String firstName, int age, String grouping, String[] adresse, int quantite, boolean motivation) {
        super(name, firstName, age, grouping, adresse);
        this.donNum++;
        this.quantite = quantite;
        this.motivation = motivation;
    }

    public void setGroupage (String gropage){
        setGrouping(gropage);
    }

    public String getGroupage(){
        return getGrouping();
    }

    public int getDonNum( ){
        return this.donNum;
    }

    // HOw much he gives ?
    public void setQuantite (int quantite){
        this.quantite=quantite;
    }

    public int getQuantite (){
        return this.quantite;
    }

    /**
     * if He is motivated let's keep his infos in a table :
     * [[String DonorKey]|[String email]|[int telNumber]]
     * DonorKey is [firstDon+the name & first's first nameLetter]
     * Example : "Johny Bob's first donNum was 54 (Was the the 54th donor)"
     * His code will be : JB54
     */
    public Donor(String name, String firstName, int age, String grouping, String[] adresse_given, int quantite) {
        super(name, firstName, age, grouping, adresse_given);
        this.quantite = quantite;
        this.motivation = true;

        //Filling DonorCard :
        this.donorCard[0] = name.charAt(0)+firstName.charAt(0)+this.getDonNum(); //Code B-)
        this.donorCard[1] = name;
        this.donorCard[2] = firstName;
    }
}
Set a key to donors
Set a key to donors
Java
mit
Fcmam5/BloodBank_project
java
## Code Before: package persons; /** * Created by fcmam5 on 11/13/15. * Implemented by Mouilah-Sarra */ public class Donor extends Personne { private static int donNum = 0; private int quantite; boolean motivation; public Donor(String name, String firstName, int age, String grouping, String[] adresse, int quantite, boolean motivation) { super(name, firstName, age, grouping, adresse); this.donNum++; this.quantite = quantite; this.motivation = motivation; } public void setGroupage (String gropage){ setGrouping(gropage); } public String getGroupage(){ return getGrouping(); } public int getDonNum( ){ return this.donNum; } // HOw much he gives ? public void setQuantite (int quantite){ this.quantite=quantite; } public int getQuantite (){ return this.quantite; } } ## Instruction: Set a key to donors ## Code After: package persons; import java.util.Objects; /** * Created by fcmam5 on 11/13/15. * Implemented by Mouilah-Sarra */ public class Donor extends Personne { private static int donNum = 0; private int quantite; private boolean motivation; private Object donorCard[] = new Object[3]; // [[int DonorKey]|[String email]|[int telNumber]] public Donor(String name, String firstName, int age, String grouping, String[] adresse, int quantite, boolean motivation) { super(name, firstName, age, grouping, adresse); this.donNum++; this.quantite = quantite; this.motivation = motivation; } public void setGroupage (String gropage){ setGrouping(gropage); } public String getGroupage(){ return getGrouping(); } public int getDonNum( ){ return this.donNum; } // HOw much he gives ? public void setQuantite (int quantite){ this.quantite=quantite; } public int getQuantite (){ return this.quantite; } /** * if He is motivated let's keep his infos in a table : * [[String DonorKey]|[String email]|[int telNumber]] * DonorKey is [firstDon+the name & first's first nameLetter] * Example : "Johny Bob's first donNum was 54 (Was the the 54th donor)" * His code will be : JB54 */ public Donor(String name, String firstName, int age, String grouping, String[] adresse_given, int quantite) { super(name, firstName, age, grouping, adresse_given); this.quantite = quantite; this.motivation = true; //Filling DonorCard : this.donorCard[0] = name.charAt(0)+firstName.charAt(0)+this.getDonNum(); //Code B-) this.donorCard[1] = name; this.donorCard[2] = firstName; } }
... package persons; import java.util.Objects; /** * Created by fcmam5 on 11/13/15. ... public class Donor extends Personne { private static int donNum = 0; private int quantite; private boolean motivation; private Object donorCard[] = new Object[3]; // [[int DonorKey]|[String email]|[int telNumber]] public Donor(String name, String firstName, int age, String grouping, String[] adresse, int quantite, boolean motivation) { super(name, firstName, age, grouping, adresse); ... } /** * if He is motivated let's keep his infos in a table : * [[String DonorKey]|[String email]|[int telNumber]] * DonorKey is [firstDon+the name & first's first nameLetter] * Example : "Johny Bob's first donNum was 54 (Was the the 54th donor)" * His code will be : JB54 */ public Donor(String name, String firstName, int age, String grouping, String[] adresse_given, int quantite) { super(name, firstName, age, grouping, adresse_given); this.quantite = quantite; this.motivation = true; //Filling DonorCard : this.donorCard[0] = name.charAt(0)+firstName.charAt(0)+this.getDonNum(); //Code B-) this.donorCard[1] = name; this.donorCard[2] = firstName; } } ...
0163470450726394efaf11570daade9f34eb2f6e
util/util_export.h
util/util_export.h
//
// Copyright 2018 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// util_export.h : Defines ANGLE_UTIL_EXPORT, a macro for exporting symbols.

#ifndef UTIL_EXPORT_H_
#define UTIL_EXPORT_H_

#if !defined(ANGLE_UTIL_EXPORT)
#    if defined(_WIN32)
#        if defined(LIBANGLE_UTIL_IMPLEMENTATION)
#            define ANGLE_UTIL_EXPORT __declspec(dllexport)
#        else
#            define ANGLE_UTIL_EXPORT __declspec(dllimport)
#        endif
#    elif defined(__GNUC__)
#        define ANGLE_UTIL_EXPORT __attribute__((visibility("default")))
#    else
#        define ANGLE_UTIL_EXPORT
#    endif
#endif  // !defined(ANGLE_UTIL_EXPORT)

#endif  // UTIL_EXPORT_H_
//
// Copyright 2018 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// util_export.h : Defines ANGLE_UTIL_EXPORT, a macro for exporting symbols.

#ifndef UTIL_EXPORT_H_
#define UTIL_EXPORT_H_

#if !defined(ANGLE_UTIL_EXPORT)
#    if defined(_WIN32)
#        if defined(LIBANGLE_UTIL_IMPLEMENTATION)
#            define ANGLE_UTIL_EXPORT __declspec(dllexport)
#        else
#            define ANGLE_UTIL_EXPORT __declspec(dllimport)
#        endif
#    elif defined(__GNUC__)
#        if defined(LIBANGLE_UTIL_IMPLEMENTATION)
#            define ANGLE_UTIL_EXPORT __attribute__((visibility("default")))
#        else
#            define ANGLE_UTIL_EXPORT
#        endif
#    else
#        define ANGLE_UTIL_EXPORT
#    endif
#endif  // !defined(ANGLE_UTIL_EXPORT)

#endif  // UTIL_EXPORT_H_
Revert "util: Always specify default visibility on exports."
Revert "util: Always specify default visibility on exports." This reverts commit 2bf23ea84e4f071c18f01b94748f3be7dccc4019. Reason for revert: Probably not the right fix. Will export all angle_utils symbols in places where they shouldn't be. Original change's description: > util: Always specify default visibility on exports. > > This fixes undefined behaviour with CFI. > > Bug: chromium:1015810 > Bug: angleproject:3162 > Change-Id: I58cfb78adabbff05e5b4560dfd70b190411fa26d > Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1869303 > Reviewed-by: Jamie Madill <[email protected]> > Commit-Queue: Jamie Madill <[email protected]> [email protected],[email protected] Change-Id: Ie847a9e6506178eb2b14e63a1ee5e9a1775b4548 No-Presubmit: true No-Tree-Checks: true No-Try: true Bug: chromium:1015810, angleproject:3162 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1869546 Reviewed-by: Jamie Madill <[email protected]> Commit-Queue: Jamie Madill <[email protected]>
C
bsd-3-clause
ppy/angle,ppy/angle,ppy/angle,ppy/angle
c
## Code Before: // // Copyright 2018 The ANGLE Project Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // // util_export.h : Defines ANGLE_UTIL_EXPORT, a macro for exporting symbols. #ifndef UTIL_EXPORT_H_ #define UTIL_EXPORT_H_ #if !defined(ANGLE_UTIL_EXPORT) # if defined(_WIN32) # if defined(LIBANGLE_UTIL_IMPLEMENTATION) # define ANGLE_UTIL_EXPORT __declspec(dllexport) # else # define ANGLE_UTIL_EXPORT __declspec(dllimport) # endif # elif defined(__GNUC__) # define ANGLE_UTIL_EXPORT __attribute__((visibility("default"))) # else # define ANGLE_UTIL_EXPORT # endif #endif // !defined(ANGLE_UTIL_EXPORT) #endif // UTIL_EXPORT_H_ ## Instruction: Revert "util: Always specify default visibility on exports." This reverts commit 2bf23ea84e4f071c18f01b94748f3be7dccc4019. Reason for revert: Probably not the right fix. Will export all angle_utils symbols in places where they shouldn't be. Original change's description: > util: Always specify default visibility on exports. > > This fixes undefined behaviour with CFI. > > Bug: chromium:1015810 > Bug: angleproject:3162 > Change-Id: I58cfb78adabbff05e5b4560dfd70b190411fa26d > Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1869303 > Reviewed-by: Jamie Madill <[email protected]> > Commit-Queue: Jamie Madill <[email protected]> [email protected],[email protected] Change-Id: Ie847a9e6506178eb2b14e63a1ee5e9a1775b4548 No-Presubmit: true No-Tree-Checks: true No-Try: true Bug: chromium:1015810, angleproject:3162 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/1869546 Reviewed-by: Jamie Madill <[email protected]> Commit-Queue: Jamie Madill <[email protected]> ## Code After: // // Copyright 2018 The ANGLE Project Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // // util_export.h : Defines ANGLE_UTIL_EXPORT, a macro for exporting symbols. #ifndef UTIL_EXPORT_H_ #define UTIL_EXPORT_H_ #if !defined(ANGLE_UTIL_EXPORT) # if defined(_WIN32) # if defined(LIBANGLE_UTIL_IMPLEMENTATION) # define ANGLE_UTIL_EXPORT __declspec(dllexport) # else # define ANGLE_UTIL_EXPORT __declspec(dllimport) # endif # elif defined(__GNUC__) # if defined(LIBANGLE_UTIL_IMPLEMENTATION) # define ANGLE_UTIL_EXPORT __attribute__((visibility("default"))) # else # define ANGLE_UTIL_EXPORT # endif # else # define ANGLE_UTIL_EXPORT # endif #endif // !defined(ANGLE_UTIL_EXPORT) #endif // UTIL_EXPORT_H_
... # define ANGLE_UTIL_EXPORT __declspec(dllimport) # endif # elif defined(__GNUC__) # if defined(LIBANGLE_UTIL_IMPLEMENTATION) # define ANGLE_UTIL_EXPORT __attribute__((visibility("default"))) # else # define ANGLE_UTIL_EXPORT # endif # else # define ANGLE_UTIL_EXPORT # endif ...
13e4d867e724f408b5d2dd21888b2f8a28d8fbc6
fabfile.py
fabfile.py
from __future__ import print_function

import webbrowser

import oinspect.sphinxify as oi


def test_basic():
    """Test with an empty context"""
    docstring = 'A test'
    content = oi.sphinxify(docstring, oi.generate_context())
    page_name = '/tmp/test_basic.html'
    with open(page_name, 'w') as f:
        f.write(content)
    webbrowser.open_new_tab(page_name)


def run_all():
    """Run all tests"""
    test_basic()
from __future__ import print_function

import webbrowser

import oinspect.sphinxify as oi


def _show_page(content, fname):
    with open(fname, 'w') as f:
        f.write(content)
    webbrowser.open_new_tab(fname)


def test_basic():
    """Test with an empty context"""
    docstring = 'A test'
    content = oi.sphinxify(docstring, oi.generate_context())
    _show_page(content, '/tmp/test_basic.html')


def test_math():
    """Test a docstring with Latex on it"""
    docstring = 'This is a rational number :math:`\\frac{x}{y}`'
    content = oi.sphinxify(docstring, oi.generate_context())
    _show_page(content, '/tmp/test_math.html')


def run_all():
    """Run all tests"""
    test_basic()
Add a test for math
Add a test for math
Python
bsd-3-clause
techtonik/docrepr,spyder-ide/docrepr,techtonik/docrepr,techtonik/docrepr,spyder-ide/docrepr,spyder-ide/docrepr
python
## Code Before: from __future__ import print_function import webbrowser import oinspect.sphinxify as oi def test_basic(): """Test with an empty context""" docstring = 'A test' content = oi.sphinxify(docstring, oi.generate_context()) page_name = '/tmp/test_basic.html' with open(page_name, 'w') as f: f.write(content) webbrowser.open_new_tab(page_name) def run_all(): """Run all tests""" test_basic() ## Instruction: Add a test for math ## Code After: from __future__ import print_function import webbrowser import oinspect.sphinxify as oi def _show_page(content, fname): with open(fname, 'w') as f: f.write(content) webbrowser.open_new_tab(fname) def test_basic(): """Test with an empty context""" docstring = 'A test' content = oi.sphinxify(docstring, oi.generate_context()) _show_page(content, '/tmp/test_basic.html') def test_math(): """Test a docstring with Latex on it""" docstring = 'This is a rational number :math:`\\frac{x}{y}`' content = oi.sphinxify(docstring, oi.generate_context()) _show_page(content, '/tmp/test_math.html') def run_all(): """Run all tests""" test_basic()
# ... existing code ... import oinspect.sphinxify as oi def _show_page(content, fname): with open(fname, 'w') as f: f.write(content) webbrowser.open_new_tab(fname) def test_basic(): """Test with an empty context""" docstring = 'A test' content = oi.sphinxify(docstring, oi.generate_context()) _show_page(content, '/tmp/test_basic.html') def test_math(): """Test a docstring with Latex on it""" docstring = 'This is a rational number :math:`\\frac{x}{y}`' content = oi.sphinxify(docstring, oi.generate_context()) _show_page(content, '/tmp/test_math.html') def run_all(): """Run all tests""" # ... rest of the code ...
428c5194f6073df9c1eb083de3d2eb9b75c294c3
samples/kotlin-rulesets-using/build.gradle.kts
samples/kotlin-rulesets-using/build.gradle.kts
import org.jlleitschuh.gradle.ktlint.KtlintExtension
import org.jlleitschuh.gradle.ktlint.reporter.ReporterType

plugins {
    application
}

plugins.apply("org.jlleitschuh.gradle.ktlint")

apply {
    plugin("kotlin")
}

application {
    mainClassName = "org.jlleitschuh.gradle.ktlint.sample.kotlin.MainKt"
}

dependencies {
    compile(kotlin("stdlib"))
}

configure<KtlintExtension> {
    verbose.set(true)
    outputToConsole.set(true)
    ruleSets.set(listOf("../kotlin-rulesets-creating/build/libs/kotlin-rulesets-creating.jar"))
    reporters.set(setOf(ReporterType.CHECKSTYLE, ReporterType.JSON))
}

tasks.findByName("ktlintMainSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
tasks.findByName("ktlintTestSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
import org.jlleitschuh.gradle.ktlint.KtlintExtension
import org.jlleitschuh.gradle.ktlint.reporter.ReporterType

plugins {
    application
}

plugins.apply("org.jlleitschuh.gradle.ktlint")

apply {
    plugin("kotlin")
}

application {
    mainClassName = "org.jlleitschuh.gradle.ktlint.sample.kotlin.MainKt"
}

dependencies {
    compile(kotlin("stdlib"))
}

configure<KtlintExtension> {
    verbose.set(true)
    outputToConsole.set(true)
    ruleSets.set(listOf("../kotlin-rulesets-creating/build/libs/kotlin-rulesets-creating.jar"))
    reporters.set(setOf(ReporterType.CHECKSTYLE, ReporterType.JSON))
}

tasks.findByName("ktlintMainSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
tasks.findByName("ktlintTestSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
tasks.findByName("ktlintKotlinScriptCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
Fix check is failed in sample that uses ruleset.
Fix check is failed in sample that uses ruleset. Signed-off-by: Yahor Berdnikau <[email protected]>
Kotlin
mit
JLLeitschuh/ktlint-gradle,JLLeitschuh/ktlint-gradle
kotlin
## Code Before: import org.jlleitschuh.gradle.ktlint.KtlintExtension import org.jlleitschuh.gradle.ktlint.reporter.ReporterType plugins { application } plugins.apply("org.jlleitschuh.gradle.ktlint") apply { plugin("kotlin") } application { mainClassName = "org.jlleitschuh.gradle.ktlint.sample.kotlin.MainKt" } dependencies { compile(kotlin("stdlib")) } configure<KtlintExtension> { verbose.set(true) outputToConsole.set(true) ruleSets.set(listOf("../kotlin-rulesets-creating/build/libs/kotlin-rulesets-creating.jar")) reporters.set(setOf(ReporterType.CHECKSTYLE, ReporterType.JSON)) } tasks.findByName("ktlintMainSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") tasks.findByName("ktlintTestSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") ## Instruction: Fix check is failed in sample that uses ruleset. Signed-off-by: Yahor Berdnikau <[email protected]> ## Code After: import org.jlleitschuh.gradle.ktlint.KtlintExtension import org.jlleitschuh.gradle.ktlint.reporter.ReporterType plugins { application } plugins.apply("org.jlleitschuh.gradle.ktlint") apply { plugin("kotlin") } application { mainClassName = "org.jlleitschuh.gradle.ktlint.sample.kotlin.MainKt" } dependencies { compile(kotlin("stdlib")) } configure<KtlintExtension> { verbose.set(true) outputToConsole.set(true) ruleSets.set(listOf("../kotlin-rulesets-creating/build/libs/kotlin-rulesets-creating.jar")) reporters.set(setOf(ReporterType.CHECKSTYLE, ReporterType.JSON)) } tasks.findByName("ktlintMainSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") tasks.findByName("ktlintTestSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") tasks.findByName("ktlintKotlinScriptCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build")
# ... existing code ... tasks.findByName("ktlintMainSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") tasks.findByName("ktlintTestSourceSetCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") tasks.findByName("ktlintKotlinScriptCheck")?.dependsOn(":samples:kotlin-rulesets-creating:build") # ... rest of the code ...
baf1cdaeb105781fd457bfd9d2a161e17c272a2d
tests/utils/core-utils.h
tests/utils/core-utils.h
/* * This file is part of meego-im-framework *
 *
 * Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
 * All rights reserved.
 * Contact: Nokia Corporation ([email protected])
 *
 * If you have questions regarding the use of this file, please contact
 * Nokia at [email protected].
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License version 2.1 as published by the Free Software Foundation
 * and appearing in the file LICENSE.LGPL included in the packaging
 * of this file.
 */

#ifndef CORE_UTILS_H__
#define CORE_UTILS_H__

#include <QString>
#include <QObject>

namespace MaliitTestUtils {
    bool isTestingInSandbox();
    QString getTestPluginPath();
    QString getTestDataPath();
    void waitForSignal(const QObject* object, const char* signal, int timeout);
    void waitAndProcessEvents(int waitTime);
}

#endif // CORE_UTILS_H__
/* * This file is part of meego-im-framework *
 *
 * Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
 * All rights reserved.
 * Contact: Nokia Corporation ([email protected])
 *
 * If you have questions regarding the use of this file, please contact
 * Nokia at [email protected].
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License version 2.1 as published by the Free Software Foundation
 * and appearing in the file LICENSE.LGPL included in the packaging
 * of this file.
 */

#ifndef CORE_UTILS_H__
#define CORE_UTILS_H__

#include <QString>
#include <QObject>

#include "abstractsurfacegroup.h"
#include "abstractsurfacegroupfactory.h"

namespace MaliitTestUtils {
    bool isTestingInSandbox();
    QString getTestPluginPath();
    QString getTestDataPath();
    void waitForSignal(const QObject* object, const char* signal, int timeout);
    void waitAndProcessEvents(int waitTime);

    class TestSurfaceGroup : public Maliit::Server::AbstractSurfaceGroup {
    public:
        TestSurfaceGroup() {}

        Maliit::Plugins::AbstractSurfaceFactory *factory() { return 0; }

        void activate() {}
        void deactivate() {}

        void setRotation(Maliit::OrientationAngle) {}
    };

    class TestSurfaceGroupFactory : public Maliit::Server::AbstractSurfaceGroupFactory {
    public:
        TestSurfaceGroupFactory() {}

        QSharedPointer<Maliit::Server::AbstractSurfaceGroup> createSurfaceGroup() {
            return QSharedPointer<Maliit::Server::AbstractSurfaceGroup>(new TestSurfaceGroup);
        }
    };
}

#endif // CORE_UTILS_H__
Add surface server side implementation for tests
Add surface server side implementation for tests Add a TestSurfaceGroup and TestSurfaceGroupFactory class for tests. RevBy: TrustMe. Full, original commit at 4dc9c4567301f2481b12965bdcf02a7281963b61 in maliit-framework
C
lgpl-2.1
maliit/inputcontext-gtk,maliit/inputcontext-gtk
c
## Code Before: /* * This file is part of meego-im-framework * * * Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies). * All rights reserved. * Contact: Nokia Corporation ([email protected]) * * If you have questions regarding the use of this file, please contact * Nokia at [email protected]. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1 as published by the Free Software Foundation * and appearing in the file LICENSE.LGPL included in the packaging * of this file. */ #ifndef CORE_UTILS_H__ #define CORE_UTILS_H__ #include <QString> #include <QObject> namespace MaliitTestUtils { bool isTestingInSandbox(); QString getTestPluginPath(); QString getTestDataPath(); void waitForSignal(const QObject* object, const char* signal, int timeout); void waitAndProcessEvents(int waitTime); } #endif // CORE_UTILS_H__ ## Instruction: Add surface server side implementation for tests Add a TestSurfaceGroup and TestSurfaceGroupFactory class for tests. RevBy: TrustMe. Full, original commit at 4dc9c4567301f2481b12965bdcf02a7281963b61 in maliit-framework ## Code After: /* * This file is part of meego-im-framework * * * Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies). * All rights reserved. * Contact: Nokia Corporation ([email protected]) * * If you have questions regarding the use of this file, please contact * Nokia at [email protected]. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1 as published by the Free Software Foundation * and appearing in the file LICENSE.LGPL included in the packaging * of this file. */ #ifndef CORE_UTILS_H__ #define CORE_UTILS_H__ #include <QString> #include <QObject> #include "abstractsurfacegroup.h" #include "abstractsurfacegroupfactory.h" namespace MaliitTestUtils { bool isTestingInSandbox(); QString getTestPluginPath(); QString getTestDataPath(); void waitForSignal(const QObject* object, const char* signal, int timeout); void waitAndProcessEvents(int waitTime); class TestSurfaceGroup : public Maliit::Server::AbstractSurfaceGroup { public: TestSurfaceGroup() {} Maliit::Plugins::AbstractSurfaceFactory *factory() { return 0; } void activate() {} void deactivate() {} void setRotation(Maliit::OrientationAngle) {} }; class TestSurfaceGroupFactory : public Maliit::Server::AbstractSurfaceGroupFactory { public: TestSurfaceGroupFactory() {} QSharedPointer<Maliit::Server::AbstractSurfaceGroup> createSurfaceGroup() { return QSharedPointer<Maliit::Server::AbstractSurfaceGroup>(new TestSurfaceGroup); } }; } #endif // CORE_UTILS_H__
... #include <QString> #include <QObject> #include "abstractsurfacegroup.h" #include "abstractsurfacegroupfactory.h" namespace MaliitTestUtils { bool isTestingInSandbox(); ... QString getTestDataPath(); void waitForSignal(const QObject* object, const char* signal, int timeout); void waitAndProcessEvents(int waitTime); class TestSurfaceGroup : public Maliit::Server::AbstractSurfaceGroup { public: TestSurfaceGroup() {} Maliit::Plugins::AbstractSurfaceFactory *factory() { return 0; } void activate() {} void deactivate() {} void setRotation(Maliit::OrientationAngle) {} }; class TestSurfaceGroupFactory : public Maliit::Server::AbstractSurfaceGroupFactory { public: TestSurfaceGroupFactory() {} QSharedPointer<Maliit::Server::AbstractSurfaceGroup> createSurfaceGroup() { return QSharedPointer<Maliit::Server::AbstractSurfaceGroup>(new TestSurfaceGroup); } }; } #endif // CORE_UTILS_H__ ...
0dfb86ff7d5785da086210d337e110462d5729b2
src/main/java/com/antew/redditinpictures/library/json/JsonDeserializer.java
src/main/java/com/antew/redditinpictures/library/json/JsonDeserializer.java
package com.antew.redditinpictures.library.json;

import com.antew.redditinpictures.library.enums.Vote;
import com.antew.redditinpictures.library.gson.VoteAdapter;
import com.antew.redditinpictures.library.logging.Log;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;

public class JsonDeserializer {
    public static final String TAG = JsonDeserializer.class.getSimpleName();
    private static Gson gson;

    public static <T> T deserialize(String json, Class<T> clazz) {
        try {
            return getGson().fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        } catch (IllegalStateException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        }

        return null;
    }

    public static Gson getGson() {
        if (gson == null) {
            GsonBuilder builder = new GsonBuilder();
            builder.registerTypeAdapter(Vote.class, new VoteAdapter());
            builder.serializeNulls();
            gson = builder.create();
        }

        return gson;
    }
}
package com.antew.redditinpictures.library.json;

import com.antew.redditinpictures.library.enums.Vote;
import com.antew.redditinpictures.library.gson.VoteAdapter;
import com.antew.redditinpictures.library.logging.Log;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;

import java.io.Reader;

public class JsonDeserializer {
    public static final String TAG = JsonDeserializer.class.getSimpleName();
    private static Gson gson;

    public static <T> T deserialize(Reader json, Class<T> clazz) {
        try {
            return getGson().fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        } catch (IllegalStateException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        }

        return null;
    }

    public static <T> T deserialize(String json, Class<T> clazz) {
        try {
            return getGson().fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        } catch (IllegalStateException e) {
            Log.e(TAG, "deserialize - Error parsing JSON!", e);
        }

        return null;
    }

    public static Gson getGson() {
        if (gson == null) {
            GsonBuilder builder = new GsonBuilder();
            builder.registerTypeAdapter(Vote.class, new VoteAdapter());
            builder.serializeNulls();
            gson = builder.create();
        }

        return gson;
    }
}
Add JSON deserializer which takes a reader
Add JSON deserializer which takes a reader
Java
apache-2.0
antew/RedditInPictures
java
## Code Before: package com.antew.redditinpictures.library.json; import com.antew.redditinpictures.library.enums.Vote; import com.antew.redditinpictures.library.gson.VoteAdapter; import com.antew.redditinpictures.library.logging.Log; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; public class JsonDeserializer { public static final String TAG = JsonDeserializer.class.getSimpleName(); private static Gson gson; public static <T> T deserialize(String json, Class<T> clazz) { try { return getGson().fromJson(json, clazz); } catch (JsonSyntaxException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } catch (IllegalStateException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } return null; } public static Gson getGson() { if (gson == null) { GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Vote.class, new VoteAdapter()); builder.serializeNulls(); gson = builder.create(); } return gson; } } ## Instruction: Add JSON deserializer which takes a reader ## Code After: package com.antew.redditinpictures.library.json; import com.antew.redditinpictures.library.enums.Vote; import com.antew.redditinpictures.library.gson.VoteAdapter; import com.antew.redditinpictures.library.logging.Log; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import java.io.Reader; public class JsonDeserializer { public static final String TAG = JsonDeserializer.class.getSimpleName(); private static Gson gson; public static <T> T deserialize(Reader json, Class<T> clazz) { try { return getGson().fromJson(json, clazz); } catch (JsonSyntaxException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } catch (IllegalStateException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } return null; } public static <T> T deserialize(String json, Class<T> clazz) { try { return getGson().fromJson(json, clazz); } catch (JsonSyntaxException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } catch (IllegalStateException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } return null; } public static Gson getGson() { if (gson == null) { GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Vote.class, new VoteAdapter()); builder.serializeNulls(); gson = builder.create(); } return gson; } }
... import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import java.io.Reader; public class JsonDeserializer { public static final String TAG = JsonDeserializer.class.getSimpleName(); private static Gson gson; public static <T> T deserialize(Reader json, Class<T> clazz) { try { return getGson().fromJson(json, clazz); } catch (JsonSyntaxException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } catch (IllegalStateException e) { Log.e(TAG, "deserialize - Error parsing JSON!", e); } return null; } public static <T> T deserialize(String json, Class<T> clazz) { try { ...
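The record above adds a Gson overload that reads JSON from a java.io.Reader as well as from a String. As an editorial aside, here is a minimal Python sketch of the same idea using only the standard library; the function name and sample payloads are invented for the illustration, and parse errors are logged and swallowed the way the Java helper does.

import io
import json
import logging

log = logging.getLogger(__name__)

def deserialize(source):
    """Parse JSON from either a str or a file-like reader; return None on bad input."""
    try:
        if isinstance(source, str):
            return json.loads(source)
        return json.load(source)           # file-like object with .read()
    except ValueError as exc:              # json.JSONDecodeError subclasses ValueError
        log.error("deserialize - error parsing JSON: %s", exc)
        return None

print(deserialize('{"score": 42}'))                  # {'score': 42}
print(deserialize(io.StringIO('{"score": 42}')))     # {'score': 42}
print(deserialize("not json"))                       # None (error is logged)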
e23b396eac6307773d2290f676c1846f236478d5
sandbox-nashorn-restricted/src/main/java/org/example/Sandbox.java
sandbox-nashorn-restricted/src/main/java/org/example/Sandbox.java
package org.example; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import java.security.AccessControlContext; import java.security.AccessController; import java.security.AllPermission; import java.security.CodeSource; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.ProtectionDomain; import java.security.cert.Certificate; public class Sandbox { public void runNashornScriptInSandbox(String source) throws Exception { ScriptEngineManager manager = new ScriptEngineManager(); ScriptEngine engine = manager.getEngineByName("nashorn"); Object result = sandbox(() -> { return engine.eval(source); }); System.out.println(String.format("result is '%s'", result)); } private Object sandbox(PrivilegedExceptionAction<Object> action) throws PrivilegedActionException { Policy.setPolicy(new java.security.Policy() { @Override public java.security.PermissionCollection getPermissions(ProtectionDomain domain) { Permissions permissions = new Permissions(); permissions.add(new AllPermission()); return permissions; } }); System.setSecurityManager(new SecurityManager()); ProtectionDomain domain = new ProtectionDomain(new CodeSource(null, (Certificate[]) null), new Permissions()); final AccessControlContext context = new AccessControlContext(new ProtectionDomain[]{domain}); try { return AccessController.doPrivileged(action, context); } finally { System.setSecurityManager(null); } } }
package org.example; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import java.security.AccessControlContext; import java.security.AccessController; import java.security.AllPermission; import java.security.CodeSource; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.ProtectionDomain; import java.security.cert.Certificate; public class Sandbox { public void runNashornScriptInSandbox(String source) throws Exception { ScriptEngineManager manager = new ScriptEngineManager(); ScriptEngine engine = manager.getEngineByName("nashorn"); Object result = sandbox(() -> { return engine.eval(source); }); System.out.println(String.format("result is '%s'", result)); } private Object sandbox(PrivilegedExceptionAction<Object> action) throws PrivilegedActionException { Policy.setPolicy(new java.security.Policy() { @Override public java.security.PermissionCollection getPermissions(ProtectionDomain domain) { Permissions permissions = new Permissions(); permissions.add(new AllPermission()); return permissions; } }); System.setSecurityManager(new SecurityManager()); ProtectionDomain domain = new ProtectionDomain(new CodeSource(null, (Certificate[]) null), new Permissions()); final AccessControlContext context = new AccessControlContext(new ProtectionDomain[]{domain}); try { return AccessController.doPrivileged(action, context); } finally { try { System.setSecurityManager(null); } catch (SecurityException e) { e.printStackTrace(); throw e; } } } }
Print stacktrace when security exception thrown
Print stacktrace when security exception thrown
Java
mit
ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox,ohtomi/sandbox
java
## Code Before: package org.example; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import java.security.AccessControlContext; import java.security.AccessController; import java.security.AllPermission; import java.security.CodeSource; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.ProtectionDomain; import java.security.cert.Certificate; public class Sandbox { public void runNashornScriptInSandbox(String source) throws Exception { ScriptEngineManager manager = new ScriptEngineManager(); ScriptEngine engine = manager.getEngineByName("nashorn"); Object result = sandbox(() -> { return engine.eval(source); }); System.out.println(String.format("result is '%s'", result)); } private Object sandbox(PrivilegedExceptionAction<Object> action) throws PrivilegedActionException { Policy.setPolicy(new java.security.Policy() { @Override public java.security.PermissionCollection getPermissions(ProtectionDomain domain) { Permissions permissions = new Permissions(); permissions.add(new AllPermission()); return permissions; } }); System.setSecurityManager(new SecurityManager()); ProtectionDomain domain = new ProtectionDomain(new CodeSource(null, (Certificate[]) null), new Permissions()); final AccessControlContext context = new AccessControlContext(new ProtectionDomain[]{domain}); try { return AccessController.doPrivileged(action, context); } finally { System.setSecurityManager(null); } } } ## Instruction: Print stacktrace when security exception thrown ## Code After: package org.example; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import java.security.AccessControlContext; import java.security.AccessController; import java.security.AllPermission; import java.security.CodeSource; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.ProtectionDomain; import java.security.cert.Certificate; public class Sandbox { public void runNashornScriptInSandbox(String source) throws Exception { ScriptEngineManager manager = new ScriptEngineManager(); ScriptEngine engine = manager.getEngineByName("nashorn"); Object result = sandbox(() -> { return engine.eval(source); }); System.out.println(String.format("result is '%s'", result)); } private Object sandbox(PrivilegedExceptionAction<Object> action) throws PrivilegedActionException { Policy.setPolicy(new java.security.Policy() { @Override public java.security.PermissionCollection getPermissions(ProtectionDomain domain) { Permissions permissions = new Permissions(); permissions.add(new AllPermission()); return permissions; } }); System.setSecurityManager(new SecurityManager()); ProtectionDomain domain = new ProtectionDomain(new CodeSource(null, (Certificate[]) null), new Permissions()); final AccessControlContext context = new AccessControlContext(new ProtectionDomain[]{domain}); try { return AccessController.doPrivileged(action, context); } finally { try { System.setSecurityManager(null); } catch (SecurityException e) { e.printStackTrace(); throw e; } } } }
# ... existing code ... try { return AccessController.doPrivileged(action, context); } finally { try { System.setSecurityManager(null); } catch (SecurityException e) { e.printStackTrace(); throw e; } } } } # ... rest of the code ...
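The change above wraps the System.setSecurityManager(null) teardown so that a SecurityException is printed and then re-thrown. The same "log the cleanup failure, then re-raise" pattern, sketched as a hypothetical Python context manager; the enable/disable callables stand in for whatever process-wide restriction is being toggled.

import logging
import traceback
from contextlib import contextmanager

log = logging.getLogger(__name__)

@contextmanager
def restricted_section(enable, disable):
    """Run a block with a restriction enabled, always attempting to lift it afterwards.

    If tearing the restriction down fails, the failure is logged with its stack
    trace and then re-raised, mirroring the try/catch added around
    setSecurityManager(null) in the record above.
    """
    enable()
    try:
        yield
    finally:
        try:
            disable()
        except Exception:
            log.error("failed to lift restriction:\n%s", traceback.format_exc())
            raise

# usage: the "restriction" here is just a flag; real code would toggle global state
flags = {"restricted": False}
with restricted_section(lambda: flags.update(restricted=True),
                        lambda: flags.update(restricted=False)):
    assert flags["restricted"]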
df4967b5e71e32f70e97d52a320d9b32d70095b7
main.py
main.py
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def now_playing(itunes): track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def is_playing(itunes): return itunes.player_state.get() == k.playing def now_playing(itunes): if not is_playing(itunes): return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): if is_playing(itunes): return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
Check if song is_playing before play
Check if song is_playing before play
Python
mit
kshvmdn/nowplaying
python
## Code Before: import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def now_playing(itunes): track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main() ## Instruction: Check if song is_playing before play ## Code After: import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def is_playing(itunes): return itunes.player_state.get() == k.playing def now_playing(itunes): if not is_playing(itunes): return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): if is_playing(itunes): return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
// ... existing code ... return itunes.quit() def is_playing(itunes): return itunes.player_state.get() == k.playing def now_playing(itunes): if not is_playing(itunes): return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), // ... modified code ... def play(itunes): if is_playing(itunes): return play_next(itunes) itunes.play() return now_playing(itunes) // ... rest of the code ...
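The patch guards play() with is_playing() and redirects now_playing() to play() when nothing is playing; note that the new play() path calls play_next(), which is not defined in the file shown. A minimal, appscript-free sketch of that state-guard pattern, with a toy Player class and a play_next() supplied so the round trip actually runs; all names and track numbers are invented.

class Player:
    """Toy stand-in for the iTunes controller, to illustrate the guard added above."""

    def __init__(self):
        self.state = "stopped"
        self.track_index = 0

    def is_playing(self):
        return self.state == "playing"

    def play(self):
        if self.is_playing():           # same guard as in the patch:
            return self.play_next()     # already playing -> skip ahead instead
        self.state = "playing"
        return self.now_playing()

    def play_next(self):
        self.track_index += 1
        return self.now_playing()

    def now_playing(self):
        if not self.is_playing():       # nothing playing -> start playback first
            return self.play()
        return "track #%d" % self.track_index

p = Player()
print(p.play())   # track #0
print(p.play())   # track #1 (second call advances instead of restarting)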
e434266c709807509960cdcdacf63ae331a1d622
tests/model/TestBoard.java
tests/model/TestBoard.java
package model; import junit.framework.TestCase; import java.awt.Color; /** * * @author bwoconnor * */ public class TestBoard extends TestCase{ public void testBoard(){ } }
package model; import junit.framework.TestCase; import java.awt.Color; /** * * @author bwoconnor * */ public class TestBoard extends TestCase{ public void testBoard(){ Board b = new Board(); b.board[0][0] = new BoardTile(0,0); b.board[1][0] = new BoardTile(1,0); b.board[2][0] = new BoardTile(2,0); b.board[3][0] = new BoardTile(3,0); b.board[4][0] = new BoardTile(4,0); b.board[5][0] = new BoardTile(5,0); } }
Test created, had to stop because of piece issues.
Test created, had to stop because of piece issues.
Java
mit
Betta-Testers/Imbrius-Kabasuji
java
## Code Before: package model; import junit.framework.TestCase; import java.awt.Color; /** * * @author bwoconnor * */ public class TestBoard extends TestCase{ public void testBoard(){ } } ## Instruction: Test created, had to stop because of piece issues. ## Code After: package model; import junit.framework.TestCase; import java.awt.Color; /** * * @author bwoconnor * */ public class TestBoard extends TestCase{ public void testBoard(){ Board b = new Board(); b.board[0][0] = new BoardTile(0,0); b.board[1][0] = new BoardTile(1,0); b.board[2][0] = new BoardTile(2,0); b.board[3][0] = new BoardTile(3,0); b.board[4][0] = new BoardTile(4,0); b.board[5][0] = new BoardTile(5,0); } }
# ... existing code ... public class TestBoard extends TestCase{ public void testBoard(){ Board b = new Board(); b.board[0][0] = new BoardTile(0,0); b.board[1][0] = new BoardTile(1,0); b.board[2][0] = new BoardTile(2,0); b.board[3][0] = new BoardTile(3,0); b.board[4][0] = new BoardTile(4,0); b.board[5][0] = new BoardTile(5,0); } # ... rest of the code ...
fae15bc929dc60d57c168a1904c9088f66b4549d
subprojects/dependency-management/dependency-management.gradle.kts
subprojects/dependency-management/dependency-management.gradle.kts
import org.gradle.build.ClasspathManifest import org.gradle.gradlebuild.testing.integrationtests.cleanup.EmptyDirectoryCheck import org.gradle.gradlebuild.unittestandcompile.ModuleType plugins { `java-library` } dependencies { api(project(":core")) api(project(":versionControl")) implementation(project(":resources")) implementation(project(":resourcesHttp")) implementation(library("asm")) implementation(library("asm_commons")) implementation(library("asm_util")) implementation(library("commons_lang")) implementation(library("commons_io")) implementation(library("ivy")) implementation(library("slf4j_api")) implementation(library("gson")) implementation(library("jcip")) implementation(library("maven3")) runtimeOnly(library("bouncycastle_provider")) runtimeOnly(project(":installationBeacon")) testImplementation(library("nekohtml")) integTestRuntimeOnly(project(":ivy")) integTestRuntimeOnly(project(":maven")) integTestRuntimeOnly(project(":resourcesS3")) integTestRuntimeOnly(project(":resourcesSftp")) integTestRuntimeOnly(project(":testKit")) testFixturesImplementation(project(":internalIntegTesting")) } gradlebuildJava { // TODO Source Compatibility was set to 1.6. Why? Is this worker code? moduleType = ModuleType.WORKER } testFixtures { from(":core") from(":messaging") from(":modelCore") from(":versionControl") } testFilesCleanup { isErrorWhenNotEmpty = false } val classpathManifest by tasks.getting(ClasspathManifest::class) { additionalProjects = listOf(project(":runtimeApiInfo")) }
import org.gradle.build.ClasspathManifest import org.gradle.gradlebuild.testing.integrationtests.cleanup.EmptyDirectoryCheck import org.gradle.gradlebuild.unittestandcompile.ModuleType plugins { `java-library` } dependencies { api(project(":core")) api(project(":versionControl")) implementation(project(":resources")) implementation(project(":resourcesHttp")) implementation(library("asm")) implementation(library("asm_commons")) implementation(library("asm_util")) implementation(library("commons_lang")) implementation(library("commons_io")) implementation(library("ivy")) implementation(library("slf4j_api")) implementation(library("gson")) implementation(library("jcip")) implementation(library("maven3")) runtimeOnly(library("bouncycastle_provider")) runtimeOnly(project(":installationBeacon")) testImplementation(library("nekohtml")) integTestRuntimeOnly(project(":ivy")) integTestRuntimeOnly(project(":maven")) integTestRuntimeOnly(project(":resourcesS3")) integTestRuntimeOnly(project(":resourcesSftp")) integTestRuntimeOnly(project(":testKit")) testFixturesImplementation(project(":internalIntegTesting")) } gradlebuildJava { moduleType = ModuleType.CORE } testFixtures { from(":core") from(":messaging") from(":modelCore") from(":versionControl") } testFilesCleanup { isErrorWhenNotEmpty = false } val classpathManifest by tasks.getting(ClasspathManifest::class) { additionalProjects = listOf(project(":runtimeApiInfo")) }
Change module type of dependency-management project to CORE
Change module type of dependency-management project to CORE
Kotlin
apache-2.0
blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle
kotlin
## Code Before: import org.gradle.build.ClasspathManifest import org.gradle.gradlebuild.testing.integrationtests.cleanup.EmptyDirectoryCheck import org.gradle.gradlebuild.unittestandcompile.ModuleType plugins { `java-library` } dependencies { api(project(":core")) api(project(":versionControl")) implementation(project(":resources")) implementation(project(":resourcesHttp")) implementation(library("asm")) implementation(library("asm_commons")) implementation(library("asm_util")) implementation(library("commons_lang")) implementation(library("commons_io")) implementation(library("ivy")) implementation(library("slf4j_api")) implementation(library("gson")) implementation(library("jcip")) implementation(library("maven3")) runtimeOnly(library("bouncycastle_provider")) runtimeOnly(project(":installationBeacon")) testImplementation(library("nekohtml")) integTestRuntimeOnly(project(":ivy")) integTestRuntimeOnly(project(":maven")) integTestRuntimeOnly(project(":resourcesS3")) integTestRuntimeOnly(project(":resourcesSftp")) integTestRuntimeOnly(project(":testKit")) testFixturesImplementation(project(":internalIntegTesting")) } gradlebuildJava { // TODO Source Compatibility was set to 1.6. Why? Is this worker code? moduleType = ModuleType.WORKER } testFixtures { from(":core") from(":messaging") from(":modelCore") from(":versionControl") } testFilesCleanup { isErrorWhenNotEmpty = false } val classpathManifest by tasks.getting(ClasspathManifest::class) { additionalProjects = listOf(project(":runtimeApiInfo")) } ## Instruction: Change module type of dependency-management project to CORE ## Code After: import org.gradle.build.ClasspathManifest import org.gradle.gradlebuild.testing.integrationtests.cleanup.EmptyDirectoryCheck import org.gradle.gradlebuild.unittestandcompile.ModuleType plugins { `java-library` } dependencies { api(project(":core")) api(project(":versionControl")) implementation(project(":resources")) implementation(project(":resourcesHttp")) implementation(library("asm")) implementation(library("asm_commons")) implementation(library("asm_util")) implementation(library("commons_lang")) implementation(library("commons_io")) implementation(library("ivy")) implementation(library("slf4j_api")) implementation(library("gson")) implementation(library("jcip")) implementation(library("maven3")) runtimeOnly(library("bouncycastle_provider")) runtimeOnly(project(":installationBeacon")) testImplementation(library("nekohtml")) integTestRuntimeOnly(project(":ivy")) integTestRuntimeOnly(project(":maven")) integTestRuntimeOnly(project(":resourcesS3")) integTestRuntimeOnly(project(":resourcesSftp")) integTestRuntimeOnly(project(":testKit")) testFixturesImplementation(project(":internalIntegTesting")) } gradlebuildJava { moduleType = ModuleType.CORE } testFixtures { from(":core") from(":messaging") from(":modelCore") from(":versionControl") } testFilesCleanup { isErrorWhenNotEmpty = false } val classpathManifest by tasks.getting(ClasspathManifest::class) { additionalProjects = listOf(project(":runtimeApiInfo")) }
# ... existing code ... } gradlebuildJava { moduleType = ModuleType.CORE } testFixtures { # ... rest of the code ...
d74b524cec824e77adbcf9cc23e28a6efba02985
takePicture.py
takePicture.py
import picamera as p import os import time os.chdir('/home/pi/Desktop') cam = p.PiCamera() cam.resolution = (320,240) cam.hflip = True cam.vflip = True x = 0 while x < 15: img = cam.capture('gregTest.jpg') time.sleep(.25) x +=1 exit()
import picamera as p import os import time os.chdir('/home/pi/Desktop') cam = p.PiCamera() cam.resolution = (320,240) cam.hflip = True cam.vflip = True x = 0 while x < 50: img = cam.capture('tempGregTest.jpg') os.unlink('gregTest.jpg') os.rename('tempGregTest.jpg','gregTest.jpg') time.sleep(.25) x +=1 exit()
Add temp pic file sequence to takepicture file
Add temp pic file sequence to takepicture file
Python
mit
jwarshaw/RaspberryDrive
python
## Code Before: import picamera as p import os import time os.chdir('/home/pi/Desktop') cam = p.PiCamera() cam.resolution = (320,240) cam.hflip = True cam.vflip = True x = 0 while x < 15: img = cam.capture('gregTest.jpg') time.sleep(.25) x +=1 exit() ## Instruction: Add temp pic file sequence to takepicture file ## Code After: import picamera as p import os import time os.chdir('/home/pi/Desktop') cam = p.PiCamera() cam.resolution = (320,240) cam.hflip = True cam.vflip = True x = 0 while x < 50: img = cam.capture('tempGregTest.jpg') os.unlink('gregTest.jpg') os.rename('tempGregTest.jpg','gregTest.jpg') time.sleep(.25) x +=1 exit()
... cam.vflip = True x = 0 while x < 50: img = cam.capture('tempGregTest.jpg') os.unlink('gregTest.jpg') os.rename('tempGregTest.jpg','gregTest.jpg') time.sleep(.25) x +=1 ...
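The loop above writes each frame to tempGregTest.jpg, unlinks gregTest.jpg and renames the temp file over it, which leaves a window where the target file is missing and fails outright on the first pass if gregTest.jpg does not exist yet. A hedged sketch of the usual alternative: write to a temporary file in the same directory and swap it in with os.replace(), which overwrites the destination atomically on the same filesystem. The file name and payload are placeholders.

import os
import tempfile

def write_atomically(path, data):
    """Write bytes to a temp file beside `path`, then atomically replace `path`."""
    directory = os.path.dirname(os.path.abspath(path))
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "wb") as tmp:
            tmp.write(data)
        os.replace(tmp_path, path)      # readers never see a missing/partial file
    except BaseException:
        os.unlink(tmp_path)             # clean up the temp file on any failure
        raise

write_atomically("gregTest.jpg", b"\xff\xd8\xff")   # stand-in for camera bytes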
557e634a3b68c13b1a19151ec3b96f456e17d347
penelophant/database.py
penelophant/database.py
""" Database Module """ from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy()
""" Database Module """ from flask_sqlalchemy import SQLAlchemy from penelophant import app db = SQLAlchemy(app)
Attach app to SQLAlchemy properly
Attach app to SQLAlchemy properly
Python
apache-2.0
kevinoconnor7/penelophant,kevinoconnor7/penelophant
python
## Code Before: """ Database Module """ from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy() ## Instruction: Attach app to SQLAlchemy properly ## Code After: """ Database Module """ from flask_sqlalchemy import SQLAlchemy from penelophant import app db = SQLAlchemy(app)
# ... existing code ... """ Database Module """ from flask_sqlalchemy import SQLAlchemy from penelophant import app db = SQLAlchemy(app) # ... rest of the code ...
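The fix binds the extension by passing the Flask app straight into SQLAlchemy(app), which requires penelophant.app to be importable at that point. For contrast, a self-contained sketch of the other common wiring, SQLAlchemy() plus init_app(), which defers the binding until the app exists and avoids circular imports; the in-memory SQLite URI and the create_app() name are illustrative only, not part of the original project.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()                       # no app bound yet

def create_app():
    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
    db.init_app(app)                    # late binding instead of SQLAlchemy(app)
    return app

app = create_app()
with app.app_context():
    db.create_all()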
6b179dc4fb95f4db380b9156381b6210adeef2e5
conftest.py
conftest.py
import os import mock import pytest PROJECT = os.environ['GCLOUD_PROJECT'] @pytest.fixture def api_client_inject_project_id(): """Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with the project ID.""" import googleapiclient.http old_execute = googleapiclient.http.HttpRequest.execute def new_execute(self, http=None, num_retries=0): self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT) return old_execute(self, http=http, num_retries=num_retries) with mock.patch( 'googleapiclient.http.HttpRequest.execute', new=new_execute): yield
import os import mock import pytest PROJECT = 'python-docs-samples' @pytest.fixture def api_client_inject_project_id(): """Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with the project ID.""" import googleapiclient.http old_execute = googleapiclient.http.HttpRequest.execute def new_execute(self, http=None, num_retries=0): self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT) return old_execute(self, http=http, num_retries=num_retries) with mock.patch( 'googleapiclient.http.HttpRequest.execute', new=new_execute): yield
Set the Project in code
Set the Project in code
Python
apache-2.0
GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python
python
## Code Before: import os import mock import pytest PROJECT = os.environ['GCLOUD_PROJECT'] @pytest.fixture def api_client_inject_project_id(): """Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with the project ID.""" import googleapiclient.http old_execute = googleapiclient.http.HttpRequest.execute def new_execute(self, http=None, num_retries=0): self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT) return old_execute(self, http=http, num_retries=num_retries) with mock.patch( 'googleapiclient.http.HttpRequest.execute', new=new_execute): yield ## Instruction: Set the Project in code ## Code After: import os import mock import pytest PROJECT = 'python-docs-samples' @pytest.fixture def api_client_inject_project_id(): """Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with the project ID.""" import googleapiclient.http old_execute = googleapiclient.http.HttpRequest.execute def new_execute(self, http=None, num_retries=0): self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT) return old_execute(self, http=http, num_retries=num_retries) with mock.patch( 'googleapiclient.http.HttpRequest.execute', new=new_execute): yield
# ... existing code ... import mock import pytest PROJECT = 'python-docs-samples' @pytest.fixture # ... rest of the code ...
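The fixture above monkey-patches googleapiclient's HttpRequest.execute so every request URI has YOUR_PROJECT_ID replaced before it goes out. A self-contained sketch of the same wrap-and-delegate patching idea against a stand-in request class, so it runs without googleapiclient or network access; FakeHttpRequest and the test are invented for the illustration, and the stdlib unittest.mock is used instead of the standalone mock package the record imports.

import pytest
from unittest import mock

PROJECT = "python-docs-samples"

class FakeHttpRequest:
    def __init__(self, uri):
        self.uri = uri

    def execute(self):
        return self.uri

@pytest.fixture
def inject_project_id():
    old_execute = FakeHttpRequest.execute

    def new_execute(self):
        self.uri = self.uri.replace("YOUR_PROJECT_ID", PROJECT)
        return old_execute(self)        # delegate to the original method

    with mock.patch.object(FakeHttpRequest, "execute", new=new_execute):
        yield

def test_uri_is_rewritten(inject_project_id):
    assert FakeHttpRequest("gs://YOUR_PROJECT_ID/bucket").execute() == \
        "gs://python-docs-samples/bucket"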
ebc81c55c26df4702219cd05ba663b355a8cfc4a
.teamcity/_Self/vcsRoots/HttpsGithubComJetBrainsIdeavimPullRequests.kt
.teamcity/_Self/vcsRoots/HttpsGithubComJetBrainsIdeavimPullRequests.kt
package _Self.vcsRoots import jetbrains.buildServer.configs.kotlin.v2019_2.* import jetbrains.buildServer.configs.kotlin.v2019_2.vcs.GitVcsRoot object HttpsGithubComJetBrainsIdeavimPullRequests : GitVcsRoot({ name = "https://github.com/JetBrains/ideavim (Pull Requests)" url = "https://github.com/JetBrains/ideavim" branchSpec = "+:refs/(pull/*/merge)" authMethod = password { userName = "AlexPl292" } })
package _Self.vcsRoots import jetbrains.buildServer.configs.kotlin.v2019_2.* import jetbrains.buildServer.configs.kotlin.v2019_2.vcs.GitVcsRoot object HttpsGithubComJetBrainsIdeavimPullRequests : GitVcsRoot({ name = "IdeaVim Pull Requests" url = "[email protected]:JetBrains/ideavim.git" branchSpec = "+:refs/(pull/*/merge)" authMethod = uploadedKey { uploadedKey = "Alex Plate TeamCity key" } })
Update GitHub Pull Requests build
Update GitHub Pull Requests build
Kotlin
mit
JetBrains/ideavim,JetBrains/ideavim
kotlin
## Code Before: package _Self.vcsRoots import jetbrains.buildServer.configs.kotlin.v2019_2.* import jetbrains.buildServer.configs.kotlin.v2019_2.vcs.GitVcsRoot object HttpsGithubComJetBrainsIdeavimPullRequests : GitVcsRoot({ name = "https://github.com/JetBrains/ideavim (Pull Requests)" url = "https://github.com/JetBrains/ideavim" branchSpec = "+:refs/(pull/*/merge)" authMethod = password { userName = "AlexPl292" } }) ## Instruction: Update GitHub Pull Requests build ## Code After: package _Self.vcsRoots import jetbrains.buildServer.configs.kotlin.v2019_2.* import jetbrains.buildServer.configs.kotlin.v2019_2.vcs.GitVcsRoot object HttpsGithubComJetBrainsIdeavimPullRequests : GitVcsRoot({ name = "IdeaVim Pull Requests" url = "[email protected]:JetBrains/ideavim.git" branchSpec = "+:refs/(pull/*/merge)" authMethod = uploadedKey { uploadedKey = "Alex Plate TeamCity key" } })
# ... existing code ... import jetbrains.buildServer.configs.kotlin.v2019_2.vcs.GitVcsRoot object HttpsGithubComJetBrainsIdeavimPullRequests : GitVcsRoot({ name = "IdeaVim Pull Requests" url = "[email protected]:JetBrains/ideavim.git" branchSpec = "+:refs/(pull/*/merge)" authMethod = uploadedKey { uploadedKey = "Alex Plate TeamCity key" } }) # ... rest of the code ...
33c03c8d50524dca3b9c5990958a0b44e9fe399e
isserviceup/services/models/statuspage.py
isserviceup/services/models/statuspage.py
import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') status = next(x for x in b.find(class_='page-status').attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status')
import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') page_status = b.find(class_='page-status') if page_status is None: if b.find(class_='unresolved-incidents'): return Status.major status = next(x for x in page_status.attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status')
Use unresolved-incidents when page-status is empty
Use unresolved-incidents when page-status is empty
Python
apache-2.0
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
python
## Code Before: import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') status = next(x for x in b.find(class_='page-status').attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status') ## Instruction: Use unresolved-incidents when page-status is empty ## Code After: import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') page_status = b.find(class_='page-status') if page_status is None: if b.find(class_='unresolved-incidents'): return Status.major status = next(x for x in page_status.attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status')
# ... existing code ... return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') page_status = b.find(class_='page-status') if page_status is None: if b.find(class_='unresolved-incidents'): return Status.major status = next(x for x in page_status.attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok # ... rest of the code ...
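The fallback added above returns a major status when .page-status is absent but .unresolved-incidents is present; note that if neither element exists, the patched code still falls through to page_status.attrs with page_status equal to None, which would raise AttributeError. An offline sketch of the lookup order against hand-written HTML snippets; the markup and the classify() helper are invented for the example.

from bs4 import BeautifulSoup

def classify(html):
    b = BeautifulSoup(html, "html.parser")
    page_status = b.find(class_="page-status")
    if page_status is None:
        # fallback path added by the patch, plus an explicit "unknown" default
        return "major" if b.find(class_="unresolved-incidents") else "unknown"
    status = next(c for c in page_status.attrs["class"] if c.startswith("status-"))
    return status.replace("status-", "")

print(classify('<div class="page-status status-none"></div>'))       # none
print(classify('<div class="unresolved-incidents">outage</div>'))    # major
print(classify('<p>nothing useful here</p>'))                        # unknown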
79d78e477e8cf64e7d4cd86470df3c251f6d8376
prequ/locations.py
prequ/locations.py
import os from shutil import rmtree from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the # preferred place to store caches for any platform. This has been addressed # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly # to the user when this directory is still found. LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') if os.path.exists(LEGACY_CACHE_DIR): secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') rmtree(LEGACY_CACHE_DIR)
from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ')
Remove migration code of pip-tools legacy cache
Remove migration code of pip-tools legacy cache It's not a responsibility of Prequ to remove legacy cache dir of pip-tools.
Python
bsd-2-clause
suutari-ai/prequ,suutari/prequ,suutari/prequ
python
## Code Before: import os from shutil import rmtree from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the # preferred place to store caches for any platform. This has been addressed # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly # to the user when this directory is still found. LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') if os.path.exists(LEGACY_CACHE_DIR): secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') rmtree(LEGACY_CACHE_DIR) ## Instruction: Remove migration code of pip-tools legacy cache It's not a responsibility of Prequ to remove legacy cache dir of pip-tools. ## Code After: from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ')
// ... existing code ... from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') // ... rest of the code ...
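The surviving code relies on pip's vendored appdirs helper for the cache location. Purely as an aside, the standalone platformdirs package (the maintained successor of appdirs) exposes the same helper without reaching into pip internals; this is an alternative sketch, not what prequ itself does.

from platformdirs import user_cache_dir

CACHE_DIR = user_cache_dir("prequ")
print(CACHE_DIR)   # e.g. ~/.cache/prequ on Linux; platform-specific elsewhere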
34f818ebe0539bf77576f7b45d28a70160271754
program.c
program.c
/* Preprocessor defines added by opencl compiler * #define CONFIGS_PER_PROC */ __kernel void start_trampoline(__global char *match_configs, __global char *output) { __private unsigned int i; for (i = 0; i < 256; i++) { output[i] = CONFIGS_PER_PROC; } write_mem_fence(CLK_GLOBAL_MEM_FENCE); return; }
/* Preprocessor defines added by opencl compiler * #define CONFIGS_PER_PROC */ __kernel void start_trampoline(__global char *match_configs, __global char *output) { __private unsigned int i, startloc; // Per worker match configs. __private char local_match_configs[CONFIGS_PER_PROC * sizeof(char) * 4]; // Read in per worker match configs startloc = get_local_id(0) * CONFIGS_PER_PROC * 4; for (i = 0; i < CONFIGS_PER_PROC * 4; i++) local_match_configs[i] = match_configs[startloc + i]; for (i = 0; i < 256; i++) { output[i] = CONFIGS_PER_PROC; } write_mem_fence(CLK_GLOBAL_MEM_FENCE); return; }
Load match configs into each local proc.
Load match configs into each local proc.
C
bsd-2-clause
jmorse/worms,jmorse/worms
c
## Code Before: /* Preprocessor defines added by opencl compiler * #define CONFIGS_PER_PROC */ __kernel void start_trampoline(__global char *match_configs, __global char *output) { __private unsigned int i; for (i = 0; i < 256; i++) { output[i] = CONFIGS_PER_PROC; } write_mem_fence(CLK_GLOBAL_MEM_FENCE); return; } ## Instruction: Load match configs into each local proc. ## Code After: /* Preprocessor defines added by opencl compiler * #define CONFIGS_PER_PROC */ __kernel void start_trampoline(__global char *match_configs, __global char *output) { __private unsigned int i, startloc; // Per worker match configs. __private char local_match_configs[CONFIGS_PER_PROC * sizeof(char) * 4]; // Read in per worker match configs startloc = get_local_id(0) * CONFIGS_PER_PROC * 4; for (i = 0; i < CONFIGS_PER_PROC * 4; i++) local_match_configs[i] = match_configs[startloc + i]; for (i = 0; i < 256; i++) { output[i] = CONFIGS_PER_PROC; } write_mem_fence(CLK_GLOBAL_MEM_FENCE); return; }
... __kernel void start_trampoline(__global char *match_configs, __global char *output) { __private unsigned int i, startloc; // Per worker match configs. __private char local_match_configs[CONFIGS_PER_PROC * sizeof(char) * 4]; // Read in per worker match configs startloc = get_local_id(0) * CONFIGS_PER_PROC * 4; for (i = 0; i < CONFIGS_PER_PROC * 4; i++) local_match_configs[i] = match_configs[startloc + i]; for (i = 0; i < 256; i++) { output[i] = CONFIGS_PER_PROC; } ...
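The kernel change gives each work-item its own copy of the match configs by copying CONFIGS_PER_PROC * 4 bytes starting at get_local_id(0) * CONFIGS_PER_PROC * 4. A host-side Python sketch of exactly that slicing arithmetic, with made-up sizes and data, just to show which bytes each worker ends up with; no OpenCL is required.

CONFIGS_PER_PROC = 2
BYTES_PER_CONFIG = 4

def worker_slice(match_configs, worker_id):
    """Return the contiguous per-worker slice the kernel copies into local memory."""
    start = worker_id * CONFIGS_PER_PROC * BYTES_PER_CONFIG
    return match_configs[start:start + CONFIGS_PER_PROC * BYTES_PER_CONFIG]

flat = bytes(range(24))                  # 3 workers x 2 configs x 4 bytes
for w in range(3):
    print(w, list(worker_slice(flat, w)))
# worker 0 gets bytes 0..7, worker 1 gets 8..15, worker 2 gets 16..23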
7ae85d9581096fbbbdc6af0d11aac697eec19400
app/src/main/java/com/neeplayer/ui/common/BindingUtils.kt
app/src/main/java/com/neeplayer/ui/common/BindingUtils.kt
package com.neeplayer.ui.common import android.databinding.BindingAdapter import android.graphics.drawable.Drawable import android.widget.ImageView import android.widget.TextView import com.bumptech.glide.Glide import java.util.concurrent.TimeUnit @BindingAdapter("imagePath") fun loadImageFile(view: ImageView, path: String?) = loadImage(view, "file://" + path) @BindingAdapter("imageURL") fun loadImage(view: ImageView, url: String?) = Glide.with(view.context).load(url).dontAnimate().into(view) @BindingAdapter("drawable") fun setDrawable(view: ImageView, drawable: Drawable) = view.setImageDrawable(drawable) @BindingAdapter("duration") fun setFormattedDuration(view: TextView, duration: Int) { val min = TimeUnit.MILLISECONDS.toMinutes(duration.toLong()) val sec = TimeUnit.MILLISECONDS.toSeconds(duration.toLong()) - TimeUnit.MINUTES.toSeconds(min) view.text = "%d:%02d".format(min, sec) }
package com.neeplayer.ui.common import android.databinding.BindingAdapter import android.graphics.drawable.Drawable import android.widget.ImageView import android.widget.TextView import com.bumptech.glide.Glide import java.util.concurrent.TimeUnit @BindingAdapter("imagePath") fun loadImageFile(view: ImageView, path: String?) = loadImage(view, "file://" + path) @BindingAdapter("imageURL") fun loadImage(view: ImageView, url: String?) = Glide.with(view.context).load(url).into(view) @BindingAdapter("drawable") fun setDrawable(view: ImageView, drawable: Drawable) = view.setImageDrawable(drawable) @BindingAdapter("duration") fun setFormattedDuration(view: TextView, duration: Int) { val min = TimeUnit.MILLISECONDS.toMinutes(duration.toLong()) val sec = TimeUnit.MILLISECONDS.toSeconds(duration.toLong()) - TimeUnit.MINUTES.toSeconds(min) view.text = "%d:%02d".format(min, sec) }
Enable animation for image loading
Enable animation for image loading
Kotlin
mit
daugeldauge/NeePlayer
kotlin
## Code Before: package com.neeplayer.ui.common import android.databinding.BindingAdapter import android.graphics.drawable.Drawable import android.widget.ImageView import android.widget.TextView import com.bumptech.glide.Glide import java.util.concurrent.TimeUnit @BindingAdapter("imagePath") fun loadImageFile(view: ImageView, path: String?) = loadImage(view, "file://" + path) @BindingAdapter("imageURL") fun loadImage(view: ImageView, url: String?) = Glide.with(view.context).load(url).dontAnimate().into(view) @BindingAdapter("drawable") fun setDrawable(view: ImageView, drawable: Drawable) = view.setImageDrawable(drawable) @BindingAdapter("duration") fun setFormattedDuration(view: TextView, duration: Int) { val min = TimeUnit.MILLISECONDS.toMinutes(duration.toLong()) val sec = TimeUnit.MILLISECONDS.toSeconds(duration.toLong()) - TimeUnit.MINUTES.toSeconds(min) view.text = "%d:%02d".format(min, sec) } ## Instruction: Enable animation for image loading ## Code After: package com.neeplayer.ui.common import android.databinding.BindingAdapter import android.graphics.drawable.Drawable import android.widget.ImageView import android.widget.TextView import com.bumptech.glide.Glide import java.util.concurrent.TimeUnit @BindingAdapter("imagePath") fun loadImageFile(view: ImageView, path: String?) = loadImage(view, "file://" + path) @BindingAdapter("imageURL") fun loadImage(view: ImageView, url: String?) = Glide.with(view.context).load(url).into(view) @BindingAdapter("drawable") fun setDrawable(view: ImageView, drawable: Drawable) = view.setImageDrawable(drawable) @BindingAdapter("duration") fun setFormattedDuration(view: TextView, duration: Int) { val min = TimeUnit.MILLISECONDS.toMinutes(duration.toLong()) val sec = TimeUnit.MILLISECONDS.toSeconds(duration.toLong()) - TimeUnit.MINUTES.toSeconds(min) view.text = "%d:%02d".format(min, sec) }
... fun loadImageFile(view: ImageView, path: String?) = loadImage(view, "file://" + path) @BindingAdapter("imageURL") fun loadImage(view: ImageView, url: String?) = Glide.with(view.context).load(url).into(view) @BindingAdapter("drawable") fun setDrawable(view: ImageView, drawable: Drawable) = view.setImageDrawable(drawable) ...
eefe1ac289db85464d3d7f2f8ce3dbf8ab94487f
setup.py
setup.py
import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = "UCLDC Deep Harvester", version = "0.0.1", description = ("deep harvester code for the UCLDC project"), long_description=read('README.md'), author='Barbara Hui', author_email='[email protected]', dependency_links=[ 'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux' ], install_requires=[ 'boto', 'pynux', 'python-magic' ], packages=['deepharvest'], test_suite='tests' )
import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = "UCLDC Deep Harvester", version = "0.0.1", description = ("deep harvester code for the UCLDC project"), long_description=read('README.md'), author='Barbara Hui', author_email='[email protected]', dependency_links=[ 'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux', 'https://github.com/barbarahui/nuxeo-calisphere/archive/master.zip#egg=UCLDC-Deep-Harvester' ], install_requires=[ 'boto', 'pynux', 'python-magic', 'UCLDC-Deep-Harvester' ], packages=['deepharvest'], test_suite='tests' )
Add UCLDC-Deep-Harvester as a dependency.
Add UCLDC-Deep-Harvester as a dependency.
Python
bsd-3-clause
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
python
## Code Before: import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = "UCLDC Deep Harvester", version = "0.0.1", description = ("deep harvester code for the UCLDC project"), long_description=read('README.md'), author='Barbara Hui', author_email='[email protected]', dependency_links=[ 'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux' ], install_requires=[ 'boto', 'pynux', 'python-magic' ], packages=['deepharvest'], test_suite='tests' ) ## Instruction: Add UCLDC-Deep-Harvester as a dependency. ## Code After: import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = "UCLDC Deep Harvester", version = "0.0.1", description = ("deep harvester code for the UCLDC project"), long_description=read('README.md'), author='Barbara Hui', author_email='[email protected]', dependency_links=[ 'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux', 'https://github.com/barbarahui/nuxeo-calisphere/archive/master.zip#egg=UCLDC-Deep-Harvester' ], install_requires=[ 'boto', 'pynux', 'python-magic', 'UCLDC-Deep-Harvester' ], packages=['deepharvest'], test_suite='tests' )
// ... existing code ... author='Barbara Hui', author_email='[email protected]', dependency_links=[ 'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux', 'https://github.com/barbarahui/nuxeo-calisphere/archive/master.zip#egg=UCLDC-Deep-Harvester' ], install_requires=[ 'boto', 'pynux', 'python-magic', 'UCLDC-Deep-Harvester' ], packages=['deepharvest'], test_suite='tests' // ... rest of the code ...
da937ef56cbbbaadbd56137ab0e642b55fc27de2
src/main/java/com/ft/membership/crypto/signature/Encoder.java
src/main/java/com/ft/membership/crypto/signature/Encoder.java
package com.ft.membership.crypto.signature; import java.nio.charset.StandardCharsets; import java.util.Base64; public class Encoder { private static final Base64.Encoder BASE_64_ENCODER = Base64.getUrlEncoder().withoutPadding(); private static final Base64.Decoder BASE_64_DECODER = Base64.getUrlDecoder(); public static String getBase64EncodedString(final byte[] bytes) { return new String(BASE_64_ENCODER.encode(bytes), StandardCharsets.UTF_8); } public static byte[] getBase64DecodedBytes(final String encodedString) { return BASE_64_DECODER.decode(encodedString.getBytes(StandardCharsets.UTF_8)); } }
package com.ft.membership.crypto.signature; import java.nio.charset.StandardCharsets; import java.util.Base64; public class Encoder { private static final Base64.Encoder BASE_64_ENCODER = Base64.getUrlEncoder().withoutPadding(); private static final Base64.Decoder BASE_64_DECODER = Base64.getUrlDecoder(); public static String getBase64EncodedString(final byte[] bytes) { return new String(BASE_64_ENCODER.encode(bytes), StandardCharsets.UTF_8); } public static byte[] getBase64DecodedBytes(final String encodedString) { try { return BASE_64_DECODER.decode(encodedString.getBytes(StandardCharsets.UTF_8)); } catch(IllegalArgumentException e) { // We do not want a RuntimeException to be thrown when the string passed is not in valid Base64 scheme // as bad input is possible to the lib methods. return null; } } }
Handle IllegalArgumentException due to bad input
Handle IllegalArgumentException due to bad input
Java
mit
Financial-Times/crypto-signatures
java
## Code Before: package com.ft.membership.crypto.signature; import java.nio.charset.StandardCharsets; import java.util.Base64; public class Encoder { private static final Base64.Encoder BASE_64_ENCODER = Base64.getUrlEncoder().withoutPadding(); private static final Base64.Decoder BASE_64_DECODER = Base64.getUrlDecoder(); public static String getBase64EncodedString(final byte[] bytes) { return new String(BASE_64_ENCODER.encode(bytes), StandardCharsets.UTF_8); } public static byte[] getBase64DecodedBytes(final String encodedString) { return BASE_64_DECODER.decode(encodedString.getBytes(StandardCharsets.UTF_8)); } } ## Instruction: Handle IllegalArgumentException due to bad input ## Code After: package com.ft.membership.crypto.signature; import java.nio.charset.StandardCharsets; import java.util.Base64; public class Encoder { private static final Base64.Encoder BASE_64_ENCODER = Base64.getUrlEncoder().withoutPadding(); private static final Base64.Decoder BASE_64_DECODER = Base64.getUrlDecoder(); public static String getBase64EncodedString(final byte[] bytes) { return new String(BASE_64_ENCODER.encode(bytes), StandardCharsets.UTF_8); } public static byte[] getBase64DecodedBytes(final String encodedString) { try { return BASE_64_DECODER.decode(encodedString.getBytes(StandardCharsets.UTF_8)); } catch(IllegalArgumentException e) { // We do not want a RuntimeException to be thrown when the string passed is not in valid Base64 scheme // as bad input is possible to the lib methods. return null; } } }
... public static byte[] getBase64DecodedBytes(final String encodedString) { try { return BASE_64_DECODER.decode(encodedString.getBytes(StandardCharsets.UTF_8)); } catch(IllegalArgumentException e) { // We do not want a RuntimeException to be thrown when the string passed is not in valid Base64 scheme // as bad input is possible to the lib methods. return null; } } } ...
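The Java fix above catches IllegalArgumentException from the URL-safe Base64 decoder and returns null instead of letting bad input crash the caller. The same defensive pattern in Python, where malformed input surfaces as binascii.Error; the helper name and sample strings are invented for the illustration.

import base64
import binascii

def b64_decode_or_none(encoded):
    """URL-safe Base64 decode that returns None on malformed input instead of raising."""
    try:
        return base64.urlsafe_b64decode(encoded.encode("utf-8"))
    except binascii.Error:
        return None

print(b64_decode_or_none("aGVsbG8="))        # b'hello'
print(b64_decode_or_none("%%%not-base64"))   # None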
1e6dc553f757014bb4a194c55eabc44364674fb9
setup.py
setup.py
from setuptools import setup, find_packages # Dynamically calculate the version version = __import__('device_inventory').get_version() # Collect installation requirements with open('requirements.txt') as reqf: import re dep_re = re.compile(r'^([^\s#]+)') inst_reqs = [m.group(0) for m in [dep_re.match(l) for l in reqf] if m] setup( name="device-inventory", version=version, packages=find_packages(), license='AGPLv3 License', description=('The Device Inventory is a tool to help the inventory ' 'of computers. It retrieves details of the hardware ' 'information and, optionally, runs some health and ' 'benchmark tests.'), scripts=['scripts/device-inventory', 'scripts/di-stress-test'], package_data={'device_inventory': [ 'config.ini', 'config_logging.json', 'data/*' ]}, url='https://github.com/eReuse/device-inventory', author='eReuse team', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Logging', 'Topic :: Utilities', ], install_requires=inst_reqs, )
import re from setuptools import setup, find_packages # Dynamically calculate the version version = __import__('device_inventory').get_version() # Collect installation requirements def read_requirements(path): with open(path) as reqf: dep_re = re.compile(r'^([^\s#]+)') return [m.group(0) for m in (dep_re.match(l) for l in reqf) if m] inst_reqs = read_requirements('requirements.txt') setup( name="device-inventory", version=version, packages=find_packages(), license='AGPLv3 License', description=('The Device Inventory is a tool to help the inventory ' 'of computers. It retrieves details of the hardware ' 'information and, optionally, runs some health and ' 'benchmark tests.'), scripts=['scripts/device-inventory', 'scripts/di-stress-test'], package_data={'device_inventory': [ 'config.ini', 'config_logging.json', 'data/*' ]}, url='https://github.com/eReuse/device-inventory', author='eReuse team', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Logging', 'Topic :: Utilities', ], install_requires=inst_reqs, )
Move requirements read code info function
Move requirements read code info function
Python
agpl-3.0
eReuse/workbench,eReuse/device-inventory,eReuse/workbench,eReuse/device-inventory
python
## Code Before: from setuptools import setup, find_packages # Dynamically calculate the version version = __import__('device_inventory').get_version() # Collect installation requirements with open('requirements.txt') as reqf: import re dep_re = re.compile(r'^([^\s#]+)') inst_reqs = [m.group(0) for m in [dep_re.match(l) for l in reqf] if m] setup( name="device-inventory", version=version, packages=find_packages(), license='AGPLv3 License', description=('The Device Inventory is a tool to help the inventory ' 'of computers. It retrieves details of the hardware ' 'information and, optionally, runs some health and ' 'benchmark tests.'), scripts=['scripts/device-inventory', 'scripts/di-stress-test'], package_data={'device_inventory': [ 'config.ini', 'config_logging.json', 'data/*' ]}, url='https://github.com/eReuse/device-inventory', author='eReuse team', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Logging', 'Topic :: Utilities', ], install_requires=inst_reqs, ) ## Instruction: Move requirements read code info function ## Code After: import re from setuptools import setup, find_packages # Dynamically calculate the version version = __import__('device_inventory').get_version() # Collect installation requirements def read_requirements(path): with open(path) as reqf: dep_re = re.compile(r'^([^\s#]+)') return [m.group(0) for m in (dep_re.match(l) for l in reqf) if m] inst_reqs = read_requirements('requirements.txt') setup( name="device-inventory", version=version, packages=find_packages(), license='AGPLv3 License', description=('The Device Inventory is a tool to help the inventory ' 'of computers. It retrieves details of the hardware ' 'information and, optionally, runs some health and ' 'benchmark tests.'), scripts=['scripts/device-inventory', 'scripts/di-stress-test'], package_data={'device_inventory': [ 'config.ini', 'config_logging.json', 'data/*' ]}, url='https://github.com/eReuse/device-inventory', author='eReuse team', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Logging', 'Topic :: Utilities', ], install_requires=inst_reqs, )
... import re from setuptools import setup, find_packages ... version = __import__('device_inventory').get_version() # Collect installation requirements def read_requirements(path): with open(path) as reqf: dep_re = re.compile(r'^([^\s#]+)') return [m.group(0) for m in (dep_re.match(l) for l in reqf) if m] inst_reqs = read_requirements('requirements.txt') setup( name="device-inventory", ...
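The refactor above moves the requirements parsing into read_requirements(path), keeping the ^([^\s#]+) regex that takes the first non-space, non-# token of each line. A quick self-contained check of that regex, adapted to accept a file object so it runs against an in-memory sample instead of a file on disk.

import io
import re

def read_requirements(fileobj):
    dep_re = re.compile(r'^([^\s#]+)')
    return [m.group(0) for m in (dep_re.match(l) for l in fileobj) if m]

sample = io.StringIO(
    "requests>=2.0      # http client\n"
    "# a full-line comment\n"
    "\n"
    "python-magic\n"
)
print(read_requirements(sample))   # ['requests>=2.0', 'python-magic']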
71cc3cf500a9db7a96aa5f1a6c19c387cf0ad4ec
fickle/backend.py
fickle/backend.py
import sklearn.cross_validation class Backend(object): def __init__(self): self.dataset_id = 0 self.dataset = None self.model = None def load(self, dataset): self.model = None self.dataset_id += 1 self.dataset = dataset self._data = dataset['data'] self._target = dataset['target'] return True def loaded(self): return (self.dataset is not None) def fit(self): if not self.loaded(): return False model = self.classifier() model.fit(self._data, self._target) self.model = model return True def trained(self): return (self.model is not None) def validate(self, test_size = 0.2, random_state = 0): if not self.loaded(): return model = self.classifier() X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split( self._data, self._target, test_size = test_size, random_state = random_state ) model.fit(X_train, y_train) return [model.score(X_test, y_test)] def predict(self, value): return self.model.predict(value)
import sklearn.cross_validation class Backend(object): def __init__(self): self.dataset_id = 0 self.random_id = 0 self.dataset = None self.model = None def load(self, dataset): self.model = None self.dataset_id += 1 self.dataset = dataset self._data = dataset['data'] self._target = dataset['target'] return True def loaded(self): return (self.dataset is not None) def fit(self): if not self.loaded(): return False model = self.classifier() model.fit(self._data, self._target) self.model = model return True def trained(self): return (self.model is not None) def validate(self, test_size = 0.2): if not self.loaded(): return self.random_id += 1 model = self.classifier() X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split( self._data, self._target, test_size = test_size, random_state = self.random_id ) model.fit(X_train, y_train) return [model.score(X_test, y_test)] def predict(self, value): return self.model.predict(value)
Validate with sequential random state
Validate with sequential random state
Python
mit
norbert/fickle
python
## Code Before: import sklearn.cross_validation class Backend(object): def __init__(self): self.dataset_id = 0 self.dataset = None self.model = None def load(self, dataset): self.model = None self.dataset_id += 1 self.dataset = dataset self._data = dataset['data'] self._target = dataset['target'] return True def loaded(self): return (self.dataset is not None) def fit(self): if not self.loaded(): return False model = self.classifier() model.fit(self._data, self._target) self.model = model return True def trained(self): return (self.model is not None) def validate(self, test_size = 0.2, random_state = 0): if not self.loaded(): return model = self.classifier() X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split( self._data, self._target, test_size = test_size, random_state = random_state ) model.fit(X_train, y_train) return [model.score(X_test, y_test)] def predict(self, value): return self.model.predict(value) ## Instruction: Validate with sequential random state ## Code After: import sklearn.cross_validation class Backend(object): def __init__(self): self.dataset_id = 0 self.random_id = 0 self.dataset = None self.model = None def load(self, dataset): self.model = None self.dataset_id += 1 self.dataset = dataset self._data = dataset['data'] self._target = dataset['target'] return True def loaded(self): return (self.dataset is not None) def fit(self): if not self.loaded(): return False model = self.classifier() model.fit(self._data, self._target) self.model = model return True def trained(self): return (self.model is not None) def validate(self, test_size = 0.2): if not self.loaded(): return self.random_id += 1 model = self.classifier() X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split( self._data, self._target, test_size = test_size, random_state = self.random_id ) model.fit(X_train, y_train) return [model.score(X_test, y_test)] def predict(self, value): return self.model.predict(value)
# ... existing code ... class Backend(object): def __init__(self): self.dataset_id = 0 self.random_id = 0 self.dataset = None self.model = None # ... modified code ... def trained(self): return (self.model is not None) def validate(self, test_size = 0.2): if not self.loaded(): return self.random_id += 1 model = self.classifier() X_train, X_test, y_train, y_test = sklearn.cross_validation.train_test_split( self._data, self._target, test_size = test_size, random_state = self.random_id ) model.fit(X_train, y_train) return [model.score(X_test, y_test)] # ... rest of the code ...
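A note on the record above: with a hard-coded `random_state = 0`, every call to `validate()` scores the model on exactly the same train/test split, while the incrementing `self.random_id` gives a fresh split each time. A minimal sketch of that effect using only numpy (`split_indices` is a made-up helper mimicking `train_test_split`'s shuffling; `sklearn.cross_validation` itself has since been replaced by `sklearn.model_selection`):

```python
import numpy as np

def split_indices(n_samples, test_size, seed):
    """Mimic train_test_split's shuffling for a given random_state."""
    rng = np.random.RandomState(seed)
    order = rng.permutation(n_samples)
    n_test = int(n_samples * test_size)
    return order[n_test:], order[:n_test]  # train indices, test indices

print(split_indices(10, 0.2, seed=0))  # identical arrays every time the seed is 0
print(split_indices(10, 0.2, seed=0))
print(split_indices(10, 0.2, seed=1))  # a different split once the seed increments
```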
ca20ef80aeea17fa8f3ab671892e81ef6c603bae
src/main/java/com/choonster/testmod3/util/Constants.java
src/main/java/com/choonster/testmod3/util/Constants.java
package com.choonster.testmod3.util; import com.choonster.testmod3.TestMod3; import com.google.common.collect.ImmutableSet; import net.minecraft.inventory.EntityEquipmentSlot; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class Constants { public static final String RESOURCE_PREFIX = TestMod3.MODID + ":"; /** * The armour equipment slots. */ public static final Set<EntityEquipmentSlot> ARMOUR_SLOTS = ImmutableSet.copyOf( Stream.of(EntityEquipmentSlot.values()) .filter(equipmentSlot -> equipmentSlot.getSlotType() == EntityEquipmentSlot.Type.ARMOR) .collect(Collectors.toList()) ); }
package com.choonster.testmod3.util; import com.choonster.testmod3.TestMod3; import com.google.common.collect.ImmutableSet; import net.minecraft.entity.ai.attributes.AttributeModifier; import net.minecraft.inventory.EntityEquipmentSlot; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class Constants { public static final String RESOURCE_PREFIX = TestMod3.MODID + ":"; /** * The armour equipment slots. */ public static final Set<EntityEquipmentSlot> ARMOUR_SLOTS = ImmutableSet.copyOf( Stream.of(EntityEquipmentSlot.values()) .filter(equipmentSlot -> equipmentSlot.getSlotType() == EntityEquipmentSlot.Type.ARMOR) .collect(Collectors.toList()) ); /** * {@link AttributeModifier} operation 0. * <p> * Add the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_ADD = 0; /** * {@link AttributeModifier} operation 1. * <p> * Add the result of operation 0 multiplied by the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_OLD_AMOUNT = 1; /** * {@link AttributeModifier} operation 2. * <p> * Multiply the attribute's amount by 1.0 + the modifier's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_NEW_AMOUNT = 2; }
Add constants for AttributeModifier operations
Add constants for AttributeModifier operations
Java
mit
droidicus/AquaRegia
java
## Code Before: package com.choonster.testmod3.util; import com.choonster.testmod3.TestMod3; import com.google.common.collect.ImmutableSet; import net.minecraft.inventory.EntityEquipmentSlot; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class Constants { public static final String RESOURCE_PREFIX = TestMod3.MODID + ":"; /** * The armour equipment slots. */ public static final Set<EntityEquipmentSlot> ARMOUR_SLOTS = ImmutableSet.copyOf( Stream.of(EntityEquipmentSlot.values()) .filter(equipmentSlot -> equipmentSlot.getSlotType() == EntityEquipmentSlot.Type.ARMOR) .collect(Collectors.toList()) ); } ## Instruction: Add constants for AttributeModifier operations ## Code After: package com.choonster.testmod3.util; import com.choonster.testmod3.TestMod3; import com.google.common.collect.ImmutableSet; import net.minecraft.entity.ai.attributes.AttributeModifier; import net.minecraft.inventory.EntityEquipmentSlot; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; public class Constants { public static final String RESOURCE_PREFIX = TestMod3.MODID + ":"; /** * The armour equipment slots. */ public static final Set<EntityEquipmentSlot> ARMOUR_SLOTS = ImmutableSet.copyOf( Stream.of(EntityEquipmentSlot.values()) .filter(equipmentSlot -> equipmentSlot.getSlotType() == EntityEquipmentSlot.Type.ARMOR) .collect(Collectors.toList()) ); /** * {@link AttributeModifier} operation 0. * <p> * Add the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_ADD = 0; /** * {@link AttributeModifier} operation 1. * <p> * Add the result of operation 0 multiplied by the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_OLD_AMOUNT = 1; /** * {@link AttributeModifier} operation 2. * <p> * Multiply the attribute's amount by 1.0 + the modifier's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_NEW_AMOUNT = 2; }
... import com.choonster.testmod3.TestMod3; import com.google.common.collect.ImmutableSet; import net.minecraft.entity.ai.attributes.AttributeModifier; import net.minecraft.inventory.EntityEquipmentSlot; import java.util.Set; ... .filter(equipmentSlot -> equipmentSlot.getSlotType() == EntityEquipmentSlot.Type.ARMOR) .collect(Collectors.toList()) ); /** * {@link AttributeModifier} operation 0. * <p> * Add the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_ADD = 0; /** * {@link AttributeModifier} operation 1. * <p> * Add the result of operation 0 multiplied by the modifier's amount to the attribute's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_OLD_AMOUNT = 1; /** * {@link AttributeModifier} operation 2. * <p> * Multiply the attribute's amount by 1.0 + the modifier's amount. */ public static final int ATTRIBUTE_MODIFIER_OPERATION_MULTIPLY_NEW_AMOUNT = 2; } ...
a4cffc0e74f9dd972357eb9dc49a57e10f1fe944
core/forms.py
core/forms.py
from collections import namedtuple from django import forms IMAGE = "img" UploadType = namedtuple("UploadType", ["directory", "label"]) FILE_TYPE_CHOICES = ( UploadType(directory=IMAGE, label="Image"), UploadType(directory="thumb", label="Thumbnail"), UploadType(directory="doc", label="Document"), UploadType(directory="code", label="Code"), UploadType(directory="pres", label="Presentation"), ) class UploadForm(forms.Form): upload_file = forms.FileField() upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE)
from collections import namedtuple from django import forms IMAGE = "img" UploadType = namedtuple("UploadType", ["directory", "label"]) FILE_TYPE_CHOICES = ( UploadType(directory=IMAGE, label="Image"), UploadType(directory="thumb", label="Thumbnail"), UploadType(directory="doc", label="Document"), UploadType(directory="code", label="Code"), UploadType(directory="pres", label="Presentation"), ) class UploadForm(forms.Form): upload_file = forms.FileField() upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE) def clean_upload_file(self): data = self.cleaned_data["upload_file"] if " " in data.name: raise forms.ValidationError("Spaces in filename not allowed") return data
Check names of files for spaces
Check names of files for spaces
Python
bsd-3-clause
ahernp/DMCM,ahernp/DMCM,ahernp/DMCM
python
## Code Before: from collections import namedtuple from django import forms IMAGE = "img" UploadType = namedtuple("UploadType", ["directory", "label"]) FILE_TYPE_CHOICES = ( UploadType(directory=IMAGE, label="Image"), UploadType(directory="thumb", label="Thumbnail"), UploadType(directory="doc", label="Document"), UploadType(directory="code", label="Code"), UploadType(directory="pres", label="Presentation"), ) class UploadForm(forms.Form): upload_file = forms.FileField() upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE) ## Instruction: Check names of files for spaces ## Code After: from collections import namedtuple from django import forms IMAGE = "img" UploadType = namedtuple("UploadType", ["directory", "label"]) FILE_TYPE_CHOICES = ( UploadType(directory=IMAGE, label="Image"), UploadType(directory="thumb", label="Thumbnail"), UploadType(directory="doc", label="Document"), UploadType(directory="code", label="Code"), UploadType(directory="pres", label="Presentation"), ) class UploadForm(forms.Form): upload_file = forms.FileField() upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE) def clean_upload_file(self): data = self.cleaned_data["upload_file"] if " " in data.name: raise forms.ValidationError("Spaces in filename not allowed") return data
# ... existing code ... class UploadForm(forms.Form): upload_file = forms.FileField() upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE) def clean_upload_file(self): data = self.cleaned_data["upload_file"] if " " in data.name: raise forms.ValidationError("Spaces in filename not allowed") return data # ... rest of the code ...
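Django wires the space check above in automatically: any `clean_<fieldname>` method is invoked during `form.is_valid()`. A self-contained sketch of that flow (it assumes Django is installed and configures bare-bones settings inline so the form machinery runs outside a project):

```python
import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal settings so forms work outside a project
    django.setup()

from django import forms
from django.core.files.uploadedfile import SimpleUploadedFile


class UploadForm(forms.Form):
    upload_file = forms.FileField()

    def clean_upload_file(self):
        data = self.cleaned_data["upload_file"]
        if " " in data.name:
            raise forms.ValidationError("Spaces in filename not allowed")
        return data


ok = UploadForm(data={}, files={"upload_file": SimpleUploadedFile("report.pdf", b"x")})
bad = UploadForm(data={}, files={"upload_file": SimpleUploadedFile("my report.pdf", b"x")})
print(ok.is_valid())                          # True
print(bad.is_valid())                         # False
print(bad.errors["upload_file"].as_text())    # * Spaces in filename not allowed
```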
4ae8302a3d91ca1e9601e0c51cb58a69f1c08cb5
setup.py
setup.py
"""bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.parsers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] )
"""bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] )
Remove 'bibpy.parsers' from package list
Remove 'bibpy.parsers' from package list
Python
mit
MisanthropicBit/bibpy,MisanthropicBit/bibpy
python
## Code Before: """bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.parsers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] ) ## Instruction: Remove 'bibpy.parsers' from package list ## Code After: """bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] )
// ... existing code ... license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ // ... rest of the code ...
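The bug class in this record, a hand-maintained package list drifting out of sync with the source tree, is often avoided by letting setuptools discover packages. Shown below as a general alternative only, not as something bibpy adopted:

```python
from setuptools import find_packages

# Run from a project root; test packages are excluded from the distribution.
print(find_packages(exclude=["tests", "tests.*"]))
# For a layout like bibpy's this would print something like:
# ['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.doi']
```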
67e6ad4c1fc955e51a5b2359b87e2587f8bae421
src/com/sibisoft/faizaniftikhartdd/Money.java
src/com/sibisoft/faizaniftikhartdd/Money.java
package com.sibisoft.faizaniftikhartdd; public class Money { protected int amount; public Money() { } public boolean equals(Object object) { Money money= (Money) object; return amount == money.amount; } }
package com.sibisoft.faizaniftikhartdd; public class Money { protected int amount; public Money() { } public boolean equals(Object object) { Money money= (Money) object; return amount == money.amount && getClass().equals(money.getClass()); } }
Add getClass() method to validate equals both same classes which make our equality test case succeed
Add getClass() method to validate equals both same classes which make our equality test case succeed
Java
mit
faizaniftikhar/faizan.iftikhartdd
java
## Code Before: package com.sibisoft.faizaniftikhartdd; public class Money { protected int amount; public Money() { } public boolean equals(Object object) { Money money= (Money) object; return amount == money.amount; } } ## Instruction: Add getClass() method to validate equals both same classes which make our equality test case succeed ## Code After: package com.sibisoft.faizaniftikhartdd; public class Money { protected int amount; public Money() { } public boolean equals(Object object) { Money money= (Money) object; return amount == money.amount && getClass().equals(money.getClass()); } }
# ... existing code ... public boolean equals(Object object) { Money money= (Money) object; return amount == money.amount && getClass().equals(money.getClass()); } } # ... rest of the code ...
9f8db061956fc73a197d9c5eb1b045a6e0655dc0
fc2json.py
fc2json.py
import sys, json file = sys.argv[1] subject = file.split('.')[0] data = { "subject": subject, "cards": {} } fc = [line.split(':') for line in open(file, 'r').read().splitlines()] js = open(subject + ".json", 'w') for line in fc: data["cards"][line[0]] = line[1] js.write(json.dumps(data)) js.close()
''' File: fc2json.py Author: Kristoffer Dalby Description: Tiny script for converting flashcard format to json. ''' import sys, json file = sys.argv[1] subject = file.split('.')[0] data = { "subject": subject, "cards": {} } fc = [line.split(':') for line in open(file, 'r').read().splitlines()] js = open(subject + ".json", 'w') for line in fc: data["cards"][line[0]] = line[1] js.write(json.dumps(data)) js.close()
Use a real JS construct, WTF knows why this works in chromium.
Use a real JS construct, WTF knows why this works in chromium.
Python
mit
kradalby/flashcards,kradalby/flashcards
python
## Code Before: import sys, json file = sys.argv[1] subject = file.split('.')[0] data = { "subject": subject, "cards": {} } fc = [line.split(':') for line in open(file, 'r').read().splitlines()] js = open(subject + ".json", 'w') for line in fc: data["cards"][line[0]] = line[1] js.write(json.dumps(data)) js.close() ## Instruction: Use a real JS construct, WTF knows why this works in chromium. ## Code After: ''' File: fc2json.py Author: Kristoffer Dalby Description: Tiny script for converting flashcard format to json. ''' import sys, json file = sys.argv[1] subject = file.split('.')[0] data = { "subject": subject, "cards": {} } fc = [line.split(':') for line in open(file, 'r').read().splitlines()] js = open(subject + ".json", 'w') for line in fc: data["cards"][line[0]] = line[1] js.write(json.dumps(data)) js.close()
... ''' File: fc2json.py Author: Kristoffer Dalby Description: Tiny script for converting flashcard format to json. ''' import sys, json ...
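The converter above shows how small the flashcard-to-JSON step is, but it splits on every ':' and never closes its file handles. A sturdier sketch of the same conversion, under the same assumed `front:back` line format (`fc_to_json` is a made-up name):

```python
import json
import sys

def fc_to_json(path):
    subject = path.rsplit(".", 1)[0]
    data = {"subject": subject, "cards": {}}
    with open(path, "r") as fh:
        for line in fh:
            line = line.rstrip("\n")
            if ":" not in line:
                continue                      # skip blank or malformed lines
            front, back = line.split(":", 1)  # split on the first ':' only
            data["cards"][front] = back
    with open(subject + ".json", "w") as js:
        json.dump(data, js)

if __name__ == "__main__":
    fc_to_json(sys.argv[1])
```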
403ad86bde44c1a015d8d35ac2826221ef98f9da
drftest/shop/api/views.py
drftest/shop/api/views.py
from django.shortcuts import render from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework.permissions import IsAuthenticated class ShopAPIView(APIView): permission_classes = (IsAuthenticated,) class OrdersView(ShopAPIView): """ Place an order. Create the "order" record with general informations, create the "order_details" records with the details of the order. During this transaction any stamp earned by the user is added to the database and at the end voucher(s) are created if there are enough stamps available for that user. """ def post(self, request, format=None): return Response({'success': True})
from django.shortcuts import render from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework.permissions import IsAuthenticated from django.db import transaction class ShopAPIView(APIView): permission_classes = (IsAuthenticated,) class OrdersView(ShopAPIView): """ Place an order. Create the "order" record with general informations, create the "order_details" records with the details of the order. During this transaction any stamp earned by the user is added to the database and at the end voucher(s) are created if there are enough stamps available for that user. """ def post(self, request, format=None): with transaction.atomic(): return Response({'success': True})
Add transaction support to the orders view
Add transaction support to the orders view
Python
mit
andreagrandi/drf3-test,andreagrandi/drf3-test,andreagrandi/drf3-test
python
## Code Before: from django.shortcuts import render from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework.permissions import IsAuthenticated class ShopAPIView(APIView): permission_classes = (IsAuthenticated,) class OrdersView(ShopAPIView): """ Place an order. Create the "order" record with general informations, create the "order_details" records with the details of the order. During this transaction any stamp earned by the user is added to the database and at the end voucher(s) are created if there are enough stamps available for that user. """ def post(self, request, format=None): return Response({'success': True}) ## Instruction: Add transaction support to the orders view ## Code After: from django.shortcuts import render from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework.permissions import IsAuthenticated from django.db import transaction class ShopAPIView(APIView): permission_classes = (IsAuthenticated,) class OrdersView(ShopAPIView): """ Place an order. Create the "order" record with general informations, create the "order_details" records with the details of the order. During this transaction any stamp earned by the user is added to the database and at the end voucher(s) are created if there are enough stamps available for that user. """ def post(self, request, format=None): with transaction.atomic(): return Response({'success': True})
# ... existing code ... from rest_framework.response import Response from rest_framework import status from rest_framework.permissions import IsAuthenticated from django.db import transaction class ShopAPIView(APIView): permission_classes = (IsAuthenticated,) class OrdersView(ShopAPIView): """ # ... modified code ... at the end voucher(s) are created if there are enough stamps available for that user. """ def post(self, request, format=None): with transaction.atomic(): return Response({'success': True}) # ... rest of the code ...
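`transaction.atomic()` in the record above makes the order header, the order details and any stamps or vouchers either all commit or all roll back. The sqlite3 snippet below illustrates that all-or-nothing behaviour with nothing but the standard library; it is an analogy, not Django code:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE orders (id INTEGER PRIMARY KEY, total REAL)")

try:
    # `with conn:` commits on success and rolls back on error,
    # much like `with transaction.atomic():` in the view above.
    with conn:
        conn.execute("INSERT INTO orders (total) VALUES (9.99)")
        raise RuntimeError("order_details insert failed")
except RuntimeError:
    pass

print(conn.execute("SELECT COUNT(*) FROM orders").fetchone()[0])  # 0 -> insert rolled back
```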
3f64d95cae68548cbb0d5a200247b3f7d6c3ccf4
mongorm/__init__.py
mongorm/__init__.py
from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'VERSION', 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
Remove VERSION that prevented import *.
Remove VERSION that prevented import *.
Python
bsd-2-clause
rahulg/mongorm
python
## Code Before: from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'VERSION', 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ] ## Instruction: Remove VERSION that prevented import *. ## Code After: from mongorm.database import Database from mongorm.document import Field, Index from mongorm.utils import DotDict, JSONEncoder class ValidationError(Exception): pass __all__ = [ 'ValidationError', 'Database', 'Field', 'Index', 'DotDict', 'JSONEncoder' ]
// ... existing code ... pass __all__ = [ 'ValidationError', 'Database', 'Field', // ... rest of the code ...
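`__all__` only matters for `from mongorm import *`, and every name it lists must actually exist in the module; that is why the stray 'VERSION' entry broke the star import. A throwaway-module demo of the failure mode (the module and names below are invented for illustration):

```python
import sys
import types

mod = types.ModuleType("fake_pkg")
mod.Database = object                      # defined
mod.__all__ = ["Database", "VERSION"]      # 'VERSION' is never defined
sys.modules["fake_pkg"] = mod

try:
    exec("from fake_pkg import *")         # what a star import does with __all__
except AttributeError as exc:
    print(exc)  # e.g. the module has no attribute 'VERSION'
```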
7755ab25249c39350004447daa614bc35e4517e7
src/malibu/__init__.py
src/malibu/__init__.py
from malibu import command # noqa from malibu import config # noqa from malibu import database # noqa from malibu import design # noqa from malibu import text # noqa from malibu import util # noqa import subprocess __git_label__ = '' try: __git_label__ = subprocess.check_output( [ 'git', 'rev-parse', '--short', 'HEAD' ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' __version__ = '0.1.8-7' __release__ = '{}-{}'.format(__version__, __git_label__).strip('\n') __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious. The whole point of this library is to have a small codebase that could be easily reused across projects with nice, easily loadable chunks that can be used disjointly. """
from malibu import command # noqa from malibu import config # noqa from malibu import database # noqa from malibu import design # noqa from malibu import text # noqa from malibu import util # noqa import subprocess __git_label__ = '' try: __git_label__ = subprocess.check_output( [ 'git', 'rev-parse', '--short', 'HEAD' ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' finally: __git_label__ = __git_label__.decode('utf-8').strip() __version__ = '0.1.8-7' __release__ = '{}-{}'.format(__version__, __git_label__) __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious. The whole point of this library is to have a small codebase that could be easily reused across projects with nice, easily loadable chunks that can be used disjointly. """
Remove unnecessary strip, add finally for release tagger
0.1.8: Remove unnecessary strip, add finally for release tagger
Python
unlicense
maiome-development/malibu
python
## Code Before: from malibu import command # noqa from malibu import config # noqa from malibu import database # noqa from malibu import design # noqa from malibu import text # noqa from malibu import util # noqa import subprocess __git_label__ = '' try: __git_label__ = subprocess.check_output( [ 'git', 'rev-parse', '--short', 'HEAD' ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' __version__ = '0.1.8-7' __release__ = '{}-{}'.format(__version__, __git_label__).strip('\n') __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious. The whole point of this library is to have a small codebase that could be easily reused across projects with nice, easily loadable chunks that can be used disjointly. """ ## Instruction: 0.1.8: Remove unnecessary strip, add finally for release tagger ## Code After: from malibu import command # noqa from malibu import config # noqa from malibu import database # noqa from malibu import design # noqa from malibu import text # noqa from malibu import util # noqa import subprocess __git_label__ = '' try: __git_label__ = subprocess.check_output( [ 'git', 'rev-parse', '--short', 'HEAD' ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' finally: __git_label__ = __git_label__.decode('utf-8').strip() __version__ = '0.1.8-7' __release__ = '{}-{}'.format(__version__, __git_label__) __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious. The whole point of this library is to have a small codebase that could be easily reused across projects with nice, easily loadable chunks that can be used disjointly. """
... ]) except (subprocess.CalledProcessError, IOError): __git_label__ = 'RELEASE' finally: __git_label__ = __git_label__.decode('utf-8').strip() __version__ = '0.1.8-7' __release__ = '{}-{}'.format(__version__, __git_label__) __doc__ = """ malibu is a collection of classes and utilities that make writing code a little bit easier and a little less tedious. ...
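The reason for the added `finally` block in this record: on Python 3, `subprocess.check_output` returns bytes with a trailing newline, so the label needs `.decode('utf-8').strip()` regardless of which branch ran (note the fallback value itself has to stay bytes for that decode to work on Python 3). A portable demo that swaps git for the current interpreter so it runs anywhere:

```python
import subprocess
import sys

try:
    label = subprocess.check_output([sys.executable, "-c", "print('abc1234')"])
except (subprocess.CalledProcessError, IOError):
    label = b"RELEASE"          # keep the fallback as bytes so decode() below is safe
finally:
    label = label.decode("utf-8").strip()

print(repr(label))  # 'abc1234' -> a str without the trailing newline
```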
6941d9048a8c630244bb48100864872b35a1a307
tests/functional/test_layout_and_styling.py
tests/functional/test_layout_and_styling.py
import os from .base import FunctionalTest class LayoutStylingTest(FunctionalTest): def test_bootstrap_links_loaded_successfully(self): self.browser.get(self.live_server_url) self.assertIn( "//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css", self.browser.page_source.strip()) self.assertIn( "//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js", self.browser.page_source.strip()) self.assertIn( '//code.jquery.com/jquery.min.js', self.browser.page_source.strip())
from .base import FunctionalTest class LayoutStylingTest(FunctionalTest): def test_bootstrap_links_loaded_successfully(self): self.browser.get(self.live_server_url) links = [link.get_attribute("href") for link in self.browser.find_elements_by_tag_name('link')] scripts = [script.get_attribute("src") for script in self.browser.find_elements_by_tag_name('script')] self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" in link for link in links]) self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js" in link for link in links]) self.assertTrue( ["//code.jquery.com/jquery.min.js" in link for link in scripts])
Fix bootstrap and jQuery link checking in homepage
Fix bootstrap and jQuery link checking in homepage
Python
bsd-3-clause
andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop
python
## Code Before: import os from .base import FunctionalTest class LayoutStylingTest(FunctionalTest): def test_bootstrap_links_loaded_successfully(self): self.browser.get(self.live_server_url) self.assertIn( "//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css", self.browser.page_source.strip()) self.assertIn( "//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js", self.browser.page_source.strip()) self.assertIn( '//code.jquery.com/jquery.min.js', self.browser.page_source.strip()) ## Instruction: Fix bootstrap and jQuery link checking in homepage ## Code After: from .base import FunctionalTest class LayoutStylingTest(FunctionalTest): def test_bootstrap_links_loaded_successfully(self): self.browser.get(self.live_server_url) links = [link.get_attribute("href") for link in self.browser.find_elements_by_tag_name('link')] scripts = [script.get_attribute("src") for script in self.browser.find_elements_by_tag_name('script')] self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" in link for link in links]) self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js" in link for link in links]) self.assertTrue( ["//code.jquery.com/jquery.min.js" in link for link in scripts])
// ... existing code ... from .base import FunctionalTest // ... modified code ... def test_bootstrap_links_loaded_successfully(self): self.browser.get(self.live_server_url) links = [link.get_attribute("href") for link in self.browser.find_elements_by_tag_name('link')] scripts = [script.get_attribute("src") for script in self.browser.find_elements_by_tag_name('script')] self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" in link for link in links]) self.assertTrue( ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js" in link for link in links]) self.assertTrue( ["//code.jquery.com/jquery.min.js" in link for link in scripts]) // ... rest of the code ...
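One thing worth flagging about the rewritten assertions in this record: `assertTrue([...])` passes for any non-empty list, even when every element is False, so the membership checks are effectively vacuous; collapsing with `any()` keeps the intended meaning. A dependency-free sketch (the hrefs below are invented):

```python
links = ["https://cdn.example.invalid/other.css"]   # pretend these were scraped hrefs

per_link = ["bootstrap.min.css" in link for link in links]
print(bool(per_link))                                        # True - non-empty list, test passes anyway
print(any("bootstrap.min.css" in link for link in links))    # False - the check that was intended
```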
8954f0d63dd45e9eec1a7f935870ac7c7d2d0bf2
setup.py
setup.py
from setuptools import setup setup( name="pytest-xdist", use_scm_version={'write_to': 'xdist/_version.py'}, description='py.test xdist plugin for distributed testing' ' and loop-on-failing modes', long_description=open('README.rst').read(), license='MIT', author='holger krekel and contributors', author_email='[email protected],[email protected]', url='https://github.com/pytest-dev/pytest-xdist', platforms=['linux', 'osx', 'win32'], packages=['xdist'], entry_points={ 'pytest11': [ 'xdist = xdist.plugin', 'xdist.looponfail = xdist.looponfail', 'xdist.boxed = xdist.boxed', ], }, zip_safe=False, install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'], setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], )
from setuptools import setup setup( name="pytest-xdist", use_scm_version={'write_to': 'xdist/_version.py'}, description='py.test xdist plugin for distributed testing' ' and loop-on-failing modes', long_description=open('README.rst').read(), license='MIT', author='holger krekel and contributors', author_email='[email protected],[email protected]', url='https://github.com/pytest-dev/pytest-xdist', platforms=['linux', 'osx', 'win32'], packages=['xdist'], entry_points={ 'pytest11': [ 'xdist = xdist.plugin', 'xdist.looponfail = xdist.looponfail', 'xdist.boxed = xdist.boxed', ], }, zip_safe=False, install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'], setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Framework :: Pytest', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], )
Add Framework::Pytest to list of classifiers
Add Framework::Pytest to list of classifiers
Python
mit
pytest-dev/pytest-xdist,nicoddemus/pytest-xdist,RonnyPfannschmidt/pytest-xdist
python
## Code Before: from setuptools import setup setup( name="pytest-xdist", use_scm_version={'write_to': 'xdist/_version.py'}, description='py.test xdist plugin for distributed testing' ' and loop-on-failing modes', long_description=open('README.rst').read(), license='MIT', author='holger krekel and contributors', author_email='[email protected],[email protected]', url='https://github.com/pytest-dev/pytest-xdist', platforms=['linux', 'osx', 'win32'], packages=['xdist'], entry_points={ 'pytest11': [ 'xdist = xdist.plugin', 'xdist.looponfail = xdist.looponfail', 'xdist.boxed = xdist.boxed', ], }, zip_safe=False, install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'], setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], ) ## Instruction: Add Framework::Pytest to list of classifiers ## Code After: from setuptools import setup setup( name="pytest-xdist", use_scm_version={'write_to': 'xdist/_version.py'}, description='py.test xdist plugin for distributed testing' ' and loop-on-failing modes', long_description=open('README.rst').read(), license='MIT', author='holger krekel and contributors', author_email='[email protected],[email protected]', url='https://github.com/pytest-dev/pytest-xdist', platforms=['linux', 'osx', 'win32'], packages=['xdist'], entry_points={ 'pytest11': [ 'xdist = xdist.plugin', 'xdist.looponfail = xdist.looponfail', 'xdist.boxed = xdist.boxed', ], }, zip_safe=False, install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'], setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Framework :: Pytest', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], )
... setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Framework :: Pytest', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', ...
73a76d1c8ea27b1a510fda6fff16b85661ffc9b2
src/main/java/com/ezardlabs/lostsector/missions/DefenseMission.java
src/main/java/com/ezardlabs/lostsector/missions/DefenseMission.java
package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.missions.objectives.Cryopod; public class DefenseMission extends Mission { Cryopod cryopod = new Cryopod(); @Override public void load() { } public void onCryopodDestroyed() { } }
package com.ezardlabs.lostsector.missions; import com.ezardlabs.dethsquare.Camera; import com.ezardlabs.dethsquare.GameObject; import com.ezardlabs.dethsquare.Vector2; import com.ezardlabs.dethsquare.prefabs.PrefabManager; import com.ezardlabs.lostsector.camera.SmartCamera; import com.ezardlabs.lostsector.map.MapManager; import com.ezardlabs.lostsector.missions.objectives.Cryopod; public class DefenseMission extends Mission { Cryopod cryopod = new Cryopod(); @Override public void load() { MapManager.loadMap("defense0"); GameObject player = GameObject.instantiate(PrefabManager.loadPrefab("player"), MapManager.playerSpawn); GameObject.instantiate(new GameObject("Camera", new Camera(true), new SmartCamera(player.transform, 1000, new Vector2(100, 100))), new Vector2()); } public void onCryopodDestroyed() { } }
Load defense map and spawn player and camera
Load defense map and spawn player and camera
Java
mit
8-Bit-Warframe/Lost-Sector
java
## Code Before: package com.ezardlabs.lostsector.missions; import com.ezardlabs.lostsector.missions.objectives.Cryopod; public class DefenseMission extends Mission { Cryopod cryopod = new Cryopod(); @Override public void load() { } public void onCryopodDestroyed() { } } ## Instruction: Load defense map and spawn player and camera ## Code After: package com.ezardlabs.lostsector.missions; import com.ezardlabs.dethsquare.Camera; import com.ezardlabs.dethsquare.GameObject; import com.ezardlabs.dethsquare.Vector2; import com.ezardlabs.dethsquare.prefabs.PrefabManager; import com.ezardlabs.lostsector.camera.SmartCamera; import com.ezardlabs.lostsector.map.MapManager; import com.ezardlabs.lostsector.missions.objectives.Cryopod; public class DefenseMission extends Mission { Cryopod cryopod = new Cryopod(); @Override public void load() { MapManager.loadMap("defense0"); GameObject player = GameObject.instantiate(PrefabManager.loadPrefab("player"), MapManager.playerSpawn); GameObject.instantiate(new GameObject("Camera", new Camera(true), new SmartCamera(player.transform, 1000, new Vector2(100, 100))), new Vector2()); } public void onCryopodDestroyed() { } }
... package com.ezardlabs.lostsector.missions; import com.ezardlabs.dethsquare.Camera; import com.ezardlabs.dethsquare.GameObject; import com.ezardlabs.dethsquare.Vector2; import com.ezardlabs.dethsquare.prefabs.PrefabManager; import com.ezardlabs.lostsector.camera.SmartCamera; import com.ezardlabs.lostsector.map.MapManager; import com.ezardlabs.lostsector.missions.objectives.Cryopod; public class DefenseMission extends Mission { ... @Override public void load() { MapManager.loadMap("defense0"); GameObject player = GameObject.instantiate(PrefabManager.loadPrefab("player"), MapManager.playerSpawn); GameObject.instantiate(new GameObject("Camera", new Camera(true), new SmartCamera(player.transform, 1000, new Vector2(100, 100))), new Vector2()); } public void onCryopodDestroyed() { ...
7d888d853f4f6c1fd8ee8c80fa7598fe071628a1
sample/src/main/java/com/squareup/seismic/sample/Demo.java
sample/src/main/java/com/squareup/seismic/sample/Demo.java
package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } @Override public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
Add @Override annotation for implemented method
Add @Override annotation for implemented method `hearShake` is implemented from `ShakeDetector.Listener`.
Java
apache-2.0
square/seismic,square/seismic
java
## Code Before: package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } } ## Instruction: Add @Override annotation for implemented method `hearShake` is implemented from `ShakeDetector.Listener`. ## Code After: package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } @Override public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
// ... existing code ... setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } @Override public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } } // ... rest of the code ...
8004590503914d9674a0b17f412c8d1836f5e1a1
testScript.py
testScript.py
from elsapy import * conFile = open("config.json") config = json.load(conFile) myCl = elsClient(config['apikey']) myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') myAuth.read(myCl) print ("myAuth.fullName: ", myAuth.fullName) myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849') myAff.read(myCl) print ("myAff.name: ", myAff.name) myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457') myDoc.read(myCl) print ("myDoc.title: ", myDoc.title) myAuth.readDocs(myCl) print ("myAuth.docList: ") i = 0 for doc in myAuth.docList: i += 1 print (i, ' - ', doc['dc:title'])
from elsapy import * conFile = open("config.json") config = json.load(conFile) myCl = elsClient(config['apikey']) myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs ##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs myAuth.read(myCl) print ("myAuth.fullName: ", myAuth.fullName) myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849') myAff.read(myCl) print ("myAff.name: ", myAff.name) myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457') myDoc.read(myCl) print ("myDoc.title: ", myDoc.title) myAuth.readDocs(myCl) print ("myAuth.docList: ") i = 0 for doc in myAuth.docList: i += 1 print (i, ' - ', doc['dc:title'])
Add second author for testing purposes
Add second author for testing purposes
Python
bsd-3-clause
ElsevierDev/elsapy
python
## Code Before: from elsapy import * conFile = open("config.json") config = json.load(conFile) myCl = elsClient(config['apikey']) myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') myAuth.read(myCl) print ("myAuth.fullName: ", myAuth.fullName) myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849') myAff.read(myCl) print ("myAff.name: ", myAff.name) myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457') myDoc.read(myCl) print ("myDoc.title: ", myDoc.title) myAuth.readDocs(myCl) print ("myAuth.docList: ") i = 0 for doc in myAuth.docList: i += 1 print (i, ' - ', doc['dc:title']) ## Instruction: Add second author for testing purposes ## Code After: from elsapy import * conFile = open("config.json") config = json.load(conFile) myCl = elsClient(config['apikey']) myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs ##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs myAuth.read(myCl) print ("myAuth.fullName: ", myAuth.fullName) myAff = elsAffil('http://api.elsevier.com/content/affiliation/AFFILIATION_ID:60016849') myAff.read(myCl) print ("myAff.name: ", myAff.name) myDoc = elsDoc('http://api.elsevier.com/content/abstract/SCOPUS_ID:84872135457') myDoc.read(myCl) print ("myDoc.title: ", myDoc.title) myAuth.readDocs(myCl) print ("myAuth.docList: ") i = 0 for doc in myAuth.docList: i += 1 print (i, ' - ', doc['dc:title'])
# ... existing code ... myCl = elsClient(config['apikey']) myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:7004367821') ## author with more than 25 docs ##myAuth = elsAuthor('http://api.elsevier.com/content/author/AUTHOR_ID:55934026500') ## author with less than 25 docs myAuth.read(myCl) print ("myAuth.fullName: ", myAuth.fullName) # ... rest of the code ...
7560bce01be5560395dd2373e979dbee086f3c21
py2app/converters/nibfile.py
py2app/converters/nibfile.py
import subprocess, os from py2app.decorators import converts @converts(suffix=".xib") def convert_xib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" if dry_run: return p = subprocess.Popen(['ibtool', '--compile', destination, source]) xit = p.wait() if xit != 0: raise RuntimeError("ibtool failed, code %d"%(xit,)) @converts(suffix=".nib") def convert_nib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" if dry_run: return p = subprocess.Popen(['ibtool', '--compile', destination, source]) xit = p.wait() if xit != 0: raise RuntimeError("ibtool failed, code %d"%(xit,))
from __future__ import print_function import subprocess, os from py2app.decorators import converts gTool = None def _get_ibtool(): global gTool if gTool is None: if os.path.exists('/usr/bin/xcrun'): gTool = subprocess.check_output(['/usr/bin/xcrun', '-find', 'ibtool'])[:-1] else: gTool = 'ibtool' print (gTool) return gTool @converts(suffix=".xib") def convert_xib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool(), '--compile', destination, source]) @converts(suffix=".nib") def convert_nib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool, '--compile', destination, source])
Simplify nib compiler and support recent Xcode versions by using xcrun
Simplify nib compiler and support recent Xcode versions by using xcrun
Python
mit
metachris/py2app,metachris/py2app,metachris/py2app,metachris/py2app
python
## Code Before: import subprocess, os from py2app.decorators import converts @converts(suffix=".xib") def convert_xib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" if dry_run: return p = subprocess.Popen(['ibtool', '--compile', destination, source]) xit = p.wait() if xit != 0: raise RuntimeError("ibtool failed, code %d"%(xit,)) @converts(suffix=".nib") def convert_nib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" if dry_run: return p = subprocess.Popen(['ibtool', '--compile', destination, source]) xit = p.wait() if xit != 0: raise RuntimeError("ibtool failed, code %d"%(xit,)) ## Instruction: Simplify nib compiler and support recent Xcode versions by using xcrun ## Code After: from __future__ import print_function import subprocess, os from py2app.decorators import converts gTool = None def _get_ibtool(): global gTool if gTool is None: if os.path.exists('/usr/bin/xcrun'): gTool = subprocess.check_output(['/usr/bin/xcrun', '-find', 'ibtool'])[:-1] else: gTool = 'ibtool' print (gTool) return gTool @converts(suffix=".xib") def convert_xib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool(), '--compile', destination, source]) @converts(suffix=".nib") def convert_nib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool, '--compile', destination, source])
# ... existing code ... from __future__ import print_function import subprocess, os from py2app.decorators import converts gTool = None def _get_ibtool(): global gTool if gTool is None: if os.path.exists('/usr/bin/xcrun'): gTool = subprocess.check_output(['/usr/bin/xcrun', '-find', 'ibtool'])[:-1] else: gTool = 'ibtool' print (gTool) return gTool @converts(suffix=".xib") def convert_xib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool(), '--compile', destination, source]) @converts(suffix=".nib") def convert_nib(source, destination, dry_run=0): destination = destination[:-4] + ".nib" print("compile %s -> %s"%(source, destination)) if dry_run: return subprocess.check_call([_get_ibtool, '--compile', destination, source]) # ... rest of the code ...
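A standalone version of the lookup introduced above, for reference. Two observations on the record's code: `convert_nib` passes `_get_ibtool` without the call parentheses, which looks like an oversight, and on Python 3 `check_output` returns bytes, so decoding is safer than slicing off the newline:

```python
import os
import subprocess

def find_ibtool():
    """Resolve ibtool through xcrun when available (macOS), else rely on PATH."""
    if os.path.exists("/usr/bin/xcrun"):
        out = subprocess.check_output(["/usr/bin/xcrun", "-find", "ibtool"])
        return out.decode("utf-8").strip()
    return "ibtool"

print(find_ibtool())  # e.g. /Applications/Xcode.app/.../ibtool on a Mac, 'ibtool' elsewhere
```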
1fa22ca68394d4ce55a4e10aa7c23f7bcfa02f79
zc_common/remote_resource/mixins.py
zc_common/remote_resource/mixins.py
from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'ids' in self.request.query_params: query_param_ids = self.request.query_params.get('ids') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset
from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'filter[id]' in self.request.query_params: query_param_ids = self.request.query_params.get('filter[id]') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset
Update query param for mixin
Update query param for mixin
Python
mit
ZeroCater/zc_common,ZeroCater/zc_common
python
## Code Before: from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'ids' in self.request.query_params: query_param_ids = self.request.query_params.get('ids') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset ## Instruction: Update query param for mixin ## Code After: from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'filter[id]' in self.request.query_params: query_param_ids = self.request.query_params.get('filter[id]') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset
... """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'filter[id]' in self.request.query_params: query_param_ids = self.request.query_params.get('filter[id]') ids = [] if not query_param_ids else query_param_ids.split(',') try: ...
d951684148189a3ae163f9919158a4ff7dbe6e4f
scala-string-format-core/src/main/scala/com/komanov/stringformat/JavaFormats.java
scala-string-format-core/src/main/scala/com/komanov/stringformat/JavaFormats.java
package com.komanov.stringformat; import org.slf4j.helpers.MessageFormatter; import java.text.MessageFormat; import java.util.Locale; public class JavaFormats { private static final MessageFormat messageFormatInstance = new MessageFormat("{0}a{1}b{2}{3}"); public static String concat(int value1, String value2, Object nullObject) { return value1 + "a" + value2 + "b" + value2 + nullObject; } public static String stringFormat(int value1, String value2, Object nullObject) { return String.format(Locale.ENGLISH, "%da%sb%s%s", value1, value2, value2, nullObject); } public static String messageFormat(int value1, String value2, Object nullObject) { return MessageFormat.format("{0}a{1}b{2}{3}", value1, value2, value2, nullObject); } public static String messageFormatCached(int value1, String value2, Object nullObject) { return messageFormatInstance.format(new Object[]{value1, value2, value2, nullObject}); } public static String slf4j(int value1, String value2, Object nullObject) { return MessageFormatter .arrayFormat("{}a{}b{}{}", new Object[]{value1, value2, value2, nullObject}) .getMessage(); } }
package com.komanov.stringformat; import org.slf4j.helpers.MessageFormatter; import java.text.MessageFormat; import java.util.Locale; public class JavaFormats { private static final MessageFormat messageFormatInstance = new MessageFormat("{0,number,#}a{1}b{2}{3}"); public static String concat(int value1, String value2, Object nullObject) { return value1 + "a" + value2 + "b" + value2 + nullObject; } public static String stringFormat(int value1, String value2, Object nullObject) { return String.format(Locale.ENGLISH, "%da%sb%s%s", value1, value2, value2, nullObject); } public static String messageFormat(int value1, String value2, Object nullObject) { return MessageFormat.format("{0,number,#}a{1}b{2}{3}", value1, value2, value2, nullObject); } public static String messageFormatCached(int value1, String value2, Object nullObject) { return messageFormatInstance.format(new Object[]{value1, value2, value2, nullObject}); } public static String slf4j(int value1, String value2, Object nullObject) { return MessageFormatter .arrayFormat("{}a{}b{}{}", new Object[]{value1, value2, value2, nullObject}) .getMessage(); } }
Fix format string for MessageFormat
Fix format string for MessageFormat
Java
mit
dkomanov/scala-string-format,dkomanov/scala-string-format
java
## Code Before: package com.komanov.stringformat; import org.slf4j.helpers.MessageFormatter; import java.text.MessageFormat; import java.util.Locale; public class JavaFormats { private static final MessageFormat messageFormatInstance = new MessageFormat("{0}a{1}b{2}{3}"); public static String concat(int value1, String value2, Object nullObject) { return value1 + "a" + value2 + "b" + value2 + nullObject; } public static String stringFormat(int value1, String value2, Object nullObject) { return String.format(Locale.ENGLISH, "%da%sb%s%s", value1, value2, value2, nullObject); } public static String messageFormat(int value1, String value2, Object nullObject) { return MessageFormat.format("{0}a{1}b{2}{3}", value1, value2, value2, nullObject); } public static String messageFormatCached(int value1, String value2, Object nullObject) { return messageFormatInstance.format(new Object[]{value1, value2, value2, nullObject}); } public static String slf4j(int value1, String value2, Object nullObject) { return MessageFormatter .arrayFormat("{}a{}b{}{}", new Object[]{value1, value2, value2, nullObject}) .getMessage(); } } ## Instruction: Fix format string for MessageFormat ## Code After: package com.komanov.stringformat; import org.slf4j.helpers.MessageFormatter; import java.text.MessageFormat; import java.util.Locale; public class JavaFormats { private static final MessageFormat messageFormatInstance = new MessageFormat("{0,number,#}a{1}b{2}{3}"); public static String concat(int value1, String value2, Object nullObject) { return value1 + "a" + value2 + "b" + value2 + nullObject; } public static String stringFormat(int value1, String value2, Object nullObject) { return String.format(Locale.ENGLISH, "%da%sb%s%s", value1, value2, value2, nullObject); } public static String messageFormat(int value1, String value2, Object nullObject) { return MessageFormat.format("{0,number,#}a{1}b{2}{3}", value1, value2, value2, nullObject); } public static String messageFormatCached(int value1, String value2, Object nullObject) { return messageFormatInstance.format(new Object[]{value1, value2, value2, nullObject}); } public static String slf4j(int value1, String value2, Object nullObject) { return MessageFormatter .arrayFormat("{}a{}b{}{}", new Object[]{value1, value2, value2, nullObject}) .getMessage(); } }
# ... existing code ... import java.util.Locale; public class JavaFormats { private static final MessageFormat messageFormatInstance = new MessageFormat("{0,number,#}a{1}b{2}{3}"); public static String concat(int value1, String value2, Object nullObject) { return value1 + "a" + value2 + "b" + value2 + nullObject; # ... modified code ... } public static String messageFormat(int value1, String value2, Object nullObject) { return MessageFormat.format("{0,number,#}a{1}b{2}{3}", value1, value2, value2, nullObject); } public static String messageFormatCached(int value1, String value2, Object nullObject) { # ... rest of the code ...
3483933b7e5709ef79a3f632bae09d24b22f4a44
pygp/likelihoods/__base.py
pygp/likelihoods/__base.py
# future imports from __future__ import division from __future__ import absolute_import from __future__ import print_function # global imports import abc # local imports from ..utils.models import Parameterized # exported symbols __all__ = ['Likelihood', 'RealLikelihood'] class Likelihood(Parameterized): """ Likelihood interface. """ @abc.abstractmethod def transform(self, y): pass class RealLikelihood(Likelihood): def transform(self, y): return np.array(y, ndmin=1, dtype=float, copy=False)
# future imports from __future__ import division from __future__ import absolute_import from __future__ import print_function # global imports import numpy as np import abc # local imports from ..utils.models import Parameterized # exported symbols __all__ = ['Likelihood', 'RealLikelihood'] class Likelihood(Parameterized): """ Likelihood interface. """ @abc.abstractmethod def transform(self, y): pass class RealLikelihood(Likelihood): def transform(self, y): return np.array(y, ndmin=1, dtype=float, copy=False)
Fix bug in RealLikelihood due to not importing numpy.
Fix bug in RealLikelihood due to not importing numpy.
Python
bsd-2-clause
mwhoffman/pygp
python
## Code Before: # future imports from __future__ import division from __future__ import absolute_import from __future__ import print_function # global imports import abc # local imports from ..utils.models import Parameterized # exported symbols __all__ = ['Likelihood', 'RealLikelihood'] class Likelihood(Parameterized): """ Likelihood interface. """ @abc.abstractmethod def transform(self, y): pass class RealLikelihood(Likelihood): def transform(self, y): return np.array(y, ndmin=1, dtype=float, copy=False) ## Instruction: Fix bug in RealLikelihood due to not importing numpy. ## Code After: # future imports from __future__ import division from __future__ import absolute_import from __future__ import print_function # global imports import numpy as np import abc # local imports from ..utils.models import Parameterized # exported symbols __all__ = ['Likelihood', 'RealLikelihood'] class Likelihood(Parameterized): """ Likelihood interface. """ @abc.abstractmethod def transform(self, y): pass class RealLikelihood(Likelihood): def transform(self, y): return np.array(y, ndmin=1, dtype=float, copy=False)
# ... existing code ... from __future__ import print_function # global imports import numpy as np import abc # local imports # ... rest of the code ...
e393dec56e978a91391eee9d72757d78ada7fc1a
server/src/main/kotlin/org/javacs/kt/codeaction/quickfix/QuickFix.kt
server/src/main/kotlin/org/javacs/kt/codeaction/quickfix/QuickFix.kt
package org.javacs.kt.codeaction.quickfix import org.eclipse.lsp4j.CodeAction import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.Diagnostic import org.eclipse.lsp4j.Range import org.eclipse.lsp4j.jsonrpc.messages.Either import org.javacs.kt.CompiledFile import org.jetbrains.kotlin.resolve.diagnostics.Diagnostics import org.jetbrains.kotlin.diagnostics.Diagnostic as KotlinDiagnostic interface QuickFix { // Computes the quickfix. Return null if the quickfix is not valid. fun compute(file: CompiledFile, range: Range, diagnostics: List<Diagnostic>): Either<Command, CodeAction>? } fun diagnosticMatch(diagnostic: Diagnostic, range: Range, diagnosticTypes: HashSet<String>): Boolean = diagnostic.range.equals(range) && diagnosticTypes.contains(diagnostic.code.left) fun diagnosticMatch(diagnostic: KotlinDiagnostic, startCursor: Int, endCursor: Int, diagnosticTypes: HashSet<String>): Boolean = diagnostic.textRanges.any { it.startOffset == startCursor && it.endOffset == endCursor } && diagnosticTypes.contains(diagnostic.factory.name) fun findDiagnosticMatch(diagnostics: List<Diagnostic>, range: Range, diagnosticTypes: HashSet<String>) = diagnostics.find { diagnosticMatch(it, range, diagnosticTypes) } fun anyDiagnosticMatch(diagnostics: Diagnostics, startCursor: Int, endCursor: Int, diagnosticTypes: HashSet<String>) = diagnostics.any { diagnosticMatch(it, startCursor, endCursor, diagnosticTypes) }
package org.javacs.kt.codeaction.quickfix import org.eclipse.lsp4j.CodeAction import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.Diagnostic import org.eclipse.lsp4j.Range import org.eclipse.lsp4j.jsonrpc.messages.Either import org.javacs.kt.CompiledFile import org.jetbrains.kotlin.resolve.diagnostics.Diagnostics import org.jetbrains.kotlin.diagnostics.Diagnostic as KotlinDiagnostic interface QuickFix { // Computes the quickfix. Return null if the quickfix is not valid. fun compute(file: CompiledFile, range: Range, diagnostics: List<Diagnostic>): Either<Command, CodeAction>? } fun diagnosticMatch(diagnostic: Diagnostic, range: Range, diagnosticTypes: Set<String>): Boolean = diagnostic.range.equals(range) && diagnosticTypes.contains(diagnostic.code.left) fun diagnosticMatch(diagnostic: KotlinDiagnostic, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>): Boolean = diagnostic.textRanges.any { it.startOffset == startCursor && it.endOffset == endCursor } && diagnosticTypes.contains(diagnostic.factory.name) fun findDiagnosticMatch(diagnostics: List<Diagnostic>, range: Range, diagnosticTypes: Set<String>) = diagnostics.find { diagnosticMatch(it, range, diagnosticTypes) } fun anyDiagnosticMatch(diagnostics: Diagnostics, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>) = diagnostics.any { diagnosticMatch(it, startCursor, endCursor, diagnosticTypes) }
Use Set instead of HashSet in diagnostic match helpers
Use Set instead of HashSet in diagnostic match helpers
Kotlin
mit
fwcd/kotlin-language-server,fwcd/kotlin-language-server,fwcd/kotlin-language-server
kotlin
## Code Before: package org.javacs.kt.codeaction.quickfix import org.eclipse.lsp4j.CodeAction import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.Diagnostic import org.eclipse.lsp4j.Range import org.eclipse.lsp4j.jsonrpc.messages.Either import org.javacs.kt.CompiledFile import org.jetbrains.kotlin.resolve.diagnostics.Diagnostics import org.jetbrains.kotlin.diagnostics.Diagnostic as KotlinDiagnostic interface QuickFix { // Computes the quickfix. Return null if the quickfix is not valid. fun compute(file: CompiledFile, range: Range, diagnostics: List<Diagnostic>): Either<Command, CodeAction>? } fun diagnosticMatch(diagnostic: Diagnostic, range: Range, diagnosticTypes: HashSet<String>): Boolean = diagnostic.range.equals(range) && diagnosticTypes.contains(diagnostic.code.left) fun diagnosticMatch(diagnostic: KotlinDiagnostic, startCursor: Int, endCursor: Int, diagnosticTypes: HashSet<String>): Boolean = diagnostic.textRanges.any { it.startOffset == startCursor && it.endOffset == endCursor } && diagnosticTypes.contains(diagnostic.factory.name) fun findDiagnosticMatch(diagnostics: List<Diagnostic>, range: Range, diagnosticTypes: HashSet<String>) = diagnostics.find { diagnosticMatch(it, range, diagnosticTypes) } fun anyDiagnosticMatch(diagnostics: Diagnostics, startCursor: Int, endCursor: Int, diagnosticTypes: HashSet<String>) = diagnostics.any { diagnosticMatch(it, startCursor, endCursor, diagnosticTypes) } ## Instruction: Use Set instead of HashSet in diagnostic match helpers ## Code After: package org.javacs.kt.codeaction.quickfix import org.eclipse.lsp4j.CodeAction import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.Diagnostic import org.eclipse.lsp4j.Range import org.eclipse.lsp4j.jsonrpc.messages.Either import org.javacs.kt.CompiledFile import org.jetbrains.kotlin.resolve.diagnostics.Diagnostics import org.jetbrains.kotlin.diagnostics.Diagnostic as KotlinDiagnostic interface QuickFix { // Computes the quickfix. Return null if the quickfix is not valid. fun compute(file: CompiledFile, range: Range, diagnostics: List<Diagnostic>): Either<Command, CodeAction>? } fun diagnosticMatch(diagnostic: Diagnostic, range: Range, diagnosticTypes: Set<String>): Boolean = diagnostic.range.equals(range) && diagnosticTypes.contains(diagnostic.code.left) fun diagnosticMatch(diagnostic: KotlinDiagnostic, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>): Boolean = diagnostic.textRanges.any { it.startOffset == startCursor && it.endOffset == endCursor } && diagnosticTypes.contains(diagnostic.factory.name) fun findDiagnosticMatch(diagnostics: List<Diagnostic>, range: Range, diagnosticTypes: Set<String>) = diagnostics.find { diagnosticMatch(it, range, diagnosticTypes) } fun anyDiagnosticMatch(diagnostics: Diagnostics, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>) = diagnostics.any { diagnosticMatch(it, startCursor, endCursor, diagnosticTypes) }
... fun compute(file: CompiledFile, range: Range, diagnostics: List<Diagnostic>): Either<Command, CodeAction>? } fun diagnosticMatch(diagnostic: Diagnostic, range: Range, diagnosticTypes: Set<String>): Boolean = diagnostic.range.equals(range) && diagnosticTypes.contains(diagnostic.code.left) fun diagnosticMatch(diagnostic: KotlinDiagnostic, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>): Boolean = diagnostic.textRanges.any { it.startOffset == startCursor && it.endOffset == endCursor } && diagnosticTypes.contains(diagnostic.factory.name) fun findDiagnosticMatch(diagnostics: List<Diagnostic>, range: Range, diagnosticTypes: Set<String>) = diagnostics.find { diagnosticMatch(it, range, diagnosticTypes) } fun anyDiagnosticMatch(diagnostics: Diagnostics, startCursor: Int, endCursor: Int, diagnosticTypes: Set<String>) = diagnostics.any { diagnosticMatch(it, startCursor, endCursor, diagnosticTypes) } ...
1a9254b0fce9ff28f510be5ad5f8a71e4b983626
src/test/java/alien4cloud/packager/ArchiveDownloader.java
src/test/java/alien4cloud/packager/ArchiveDownloader.java
package alien4cloud.packager; import alien4cloud.git.RepositoryManager; import alien4cloud.utils.FileUtil; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; /** * Utility that downloads the archives we want to package. */ public class ArchiveDownloader { public static void main(String[] args) throws IOException { Path buildDirectory = Paths.get(args[0]); Path gitDirectory = buildDirectory.resolve("git"); Path zipDirectory = buildDirectory.resolve("archives"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/tosca-normative-types.git", "master", "tosca-normative-types"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/alien4cloud-extended-types.git", "master", "alien4cloud-extended-types"); // Do we want to initialize alien with sample topology. // repositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/samples.git", "master", "samples"); FileUtil.zip(gitDirectory.resolve("tosca-normative-types"), zipDirectory.resolve("tosca-normative-types.zip")); FileUtil.zip(gitDirectory.resolve("alien4cloud-extended-types/alien-extended-storage-types"), zipDirectory.resolve("alien-extended-storage-types.zip")); // zip archives // FileUtil.zip(from, to); } }
package alien4cloud.packager; import alien4cloud.git.RepositoryManager; import alien4cloud.utils.FileUtil; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; /** * Utility that downloads the archives we want to package. */ public class ArchiveDownloader { public static void main(String[] args) throws IOException { Path buildDirectory = Paths.get(args[0]); Path gitDirectory = buildDirectory.resolve("git"); Path zipDirectory = buildDirectory.resolve("archives"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/tosca-normative-types.git", "1.2.0", "tosca-normative-types"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/alien4cloud-extended-types.git", "1.2.0", "alien4cloud-extended-types"); // Do we want to initialize alien with sample topology. // repositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/samples.git", "master", "samples"); FileUtil.zip(gitDirectory.resolve("tosca-normative-types"), zipDirectory.resolve("tosca-normative-types.zip")); FileUtil.zip(gitDirectory.resolve("alien4cloud-extended-types/alien-extended-storage-types"), zipDirectory.resolve("alien-extended-storage-types.zip")); // zip archives // FileUtil.zip(from, to); } }
Fix archive versions for 1.3.0
Fix archive versions for 1.3.0
Java
apache-2.0
alien4cloud/alien4cloud-dist,alien4cloud/alien4cloud-dist
java
## Code Before: package alien4cloud.packager; import alien4cloud.git.RepositoryManager; import alien4cloud.utils.FileUtil; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; /** * Utility that downloads the archives we want to package. */ public class ArchiveDownloader { public static void main(String[] args) throws IOException { Path buildDirectory = Paths.get(args[0]); Path gitDirectory = buildDirectory.resolve("git"); Path zipDirectory = buildDirectory.resolve("archives"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/tosca-normative-types.git", "master", "tosca-normative-types"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/alien4cloud-extended-types.git", "master", "alien4cloud-extended-types"); // Do we want to initialize alien with sample topology. // repositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/samples.git", "master", "samples"); FileUtil.zip(gitDirectory.resolve("tosca-normative-types"), zipDirectory.resolve("tosca-normative-types.zip")); FileUtil.zip(gitDirectory.resolve("alien4cloud-extended-types/alien-extended-storage-types"), zipDirectory.resolve("alien-extended-storage-types.zip")); // zip archives // FileUtil.zip(from, to); } } ## Instruction: Fix archive versions for 1.3.0 ## Code After: package alien4cloud.packager; import alien4cloud.git.RepositoryManager; import alien4cloud.utils.FileUtil; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; /** * Utility that downloads the archives we want to package. */ public class ArchiveDownloader { public static void main(String[] args) throws IOException { Path buildDirectory = Paths.get(args[0]); Path gitDirectory = buildDirectory.resolve("git"); Path zipDirectory = buildDirectory.resolve("archives"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/tosca-normative-types.git", "1.2.0", "tosca-normative-types"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/alien4cloud-extended-types.git", "1.2.0", "alien4cloud-extended-types"); // Do we want to initialize alien with sample topology. // repositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/samples.git", "master", "samples"); FileUtil.zip(gitDirectory.resolve("tosca-normative-types"), zipDirectory.resolve("tosca-normative-types.zip")); FileUtil.zip(gitDirectory.resolve("alien4cloud-extended-types/alien-extended-storage-types"), zipDirectory.resolve("alien-extended-storage-types.zip")); // zip archives // FileUtil.zip(from, to); } }
// ... existing code ... Path gitDirectory = buildDirectory.resolve("git"); Path zipDirectory = buildDirectory.resolve("archives"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/tosca-normative-types.git", "1.2.0", "tosca-normative-types"); RepositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/alien4cloud-extended-types.git", "1.2.0", "alien4cloud-extended-types"); // Do we want to initialize alien with sample topology. // repositoryManager.cloneOrCheckout(gitDirectory, "https://github.com/alien4cloud/samples.git", "master", "samples"); // ... rest of the code ...
cfcee83354f4917e719c3ef4236a2644dc98e153
ophyd/__init__.py
ophyd/__init__.py
import logging logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) from . import * # Signals from .signal import (Signal, EpicsSignal, EpicsSignalRO) # Positioners from .positioner import Positioner from .epics_motor import EpicsMotor from .pv_positioner import (PVPositioner, PVPositionerPC) from .pseudopos import (PseudoPositioner, PseudoSingle) # Devices from .scaler import EpicsScaler from .device import (Device, Component, DynamicDeviceComponent) from .mca import EpicsMCA, EpicsDXP # Areadetector-related from .areadetector import * from ._version import get_versions from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos, log_pos_diff, log_pos_mov) __version__ = get_versions()['version'] del get_versions
import logging logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) from . import * # Signals from .signal import (Signal, EpicsSignal, EpicsSignalRO) # Positioners from .positioner import Positioner from .epics_motor import EpicsMotor from .pv_positioner import (PVPositioner, PVPositionerPC) from .pseudopos import (PseudoPositioner, PseudoSingle) # Devices from .scaler import EpicsScaler from .device import (Device, Component, DynamicDeviceComponent) from .ophydobj import StatusBase from .mca import EpicsMCA, EpicsDXP # Areadetector-related from .areadetector import * from ._version import get_versions from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos, log_pos_diff, log_pos_mov) __version__ = get_versions()['version'] del get_versions
Add StatusBase to top-level API.
MNT: Add StatusBase to top-level API.
Python
bsd-3-clause
dchabot/ophyd,dchabot/ophyd
python
## Code Before: import logging logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) from . import * # Signals from .signal import (Signal, EpicsSignal, EpicsSignalRO) # Positioners from .positioner import Positioner from .epics_motor import EpicsMotor from .pv_positioner import (PVPositioner, PVPositionerPC) from .pseudopos import (PseudoPositioner, PseudoSingle) # Devices from .scaler import EpicsScaler from .device import (Device, Component, DynamicDeviceComponent) from .mca import EpicsMCA, EpicsDXP # Areadetector-related from .areadetector import * from ._version import get_versions from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos, log_pos_diff, log_pos_mov) __version__ = get_versions()['version'] del get_versions ## Instruction: MNT: Add StatusBase to top-level API. ## Code After: import logging logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) from . import * # Signals from .signal import (Signal, EpicsSignal, EpicsSignalRO) # Positioners from .positioner import Positioner from .epics_motor import EpicsMotor from .pv_positioner import (PVPositioner, PVPositionerPC) from .pseudopos import (PseudoPositioner, PseudoSingle) # Devices from .scaler import EpicsScaler from .device import (Device, Component, DynamicDeviceComponent) from .ophydobj import StatusBase from .mca import EpicsMCA, EpicsDXP # Areadetector-related from .areadetector import * from ._version import get_versions from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos, log_pos_diff, log_pos_mov) __version__ = get_versions()['version'] del get_versions
# ... existing code ... # Devices from .scaler import EpicsScaler from .device import (Device, Component, DynamicDeviceComponent) from .ophydobj import StatusBase from .mca import EpicsMCA, EpicsDXP # Areadetector-related # ... rest of the code ...
dbce975bcb348e0f878f39557d911e99ba08294c
corehq/apps/hqcase/management/commands/ptop_reindexer_v2.py
corehq/apps/hqcase/management/commands/ptop_reindexer_v2.py
from django.core.management import BaseCommand, CommandError from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer class Command(BaseCommand): args = 'index' help = 'Reindex a pillowtop index' def handle(self, index, *args, **options): reindex_fns = { 'case': get_couch_case_reindexer, 'form': get_couch_form_reindexer, 'sql-case': get_sql_case_reindexer, 'sql-form': get_sql_form_reindexer, } if index not in reindex_fns: raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys()))) reindexer = reindex_fns[index]() reindexer.reindex()
from django.core.management import BaseCommand, CommandError from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer from corehq.pillows.case_search import get_couch_case_search_reindexer from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer class Command(BaseCommand): args = 'index' help = 'Reindex a pillowtop index' def handle(self, index, *args, **options): reindex_fns = { 'case': get_couch_case_reindexer, 'form': get_couch_form_reindexer, 'sql-case': get_sql_case_reindexer, 'sql-form': get_sql_form_reindexer, 'case-search': get_couch_case_search_reindexer } if index not in reindex_fns: raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys()))) reindexer = reindex_fns[index]() reindexer.reindex()
Enable reindexing with v2 reindexer
Enable reindexing with v2 reindexer
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
python
## Code Before: from django.core.management import BaseCommand, CommandError from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer class Command(BaseCommand): args = 'index' help = 'Reindex a pillowtop index' def handle(self, index, *args, **options): reindex_fns = { 'case': get_couch_case_reindexer, 'form': get_couch_form_reindexer, 'sql-case': get_sql_case_reindexer, 'sql-form': get_sql_form_reindexer, } if index not in reindex_fns: raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys()))) reindexer = reindex_fns[index]() reindexer.reindex() ## Instruction: Enable reindexing with v2 reindexer ## Code After: from django.core.management import BaseCommand, CommandError from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer from corehq.pillows.case_search import get_couch_case_search_reindexer from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer class Command(BaseCommand): args = 'index' help = 'Reindex a pillowtop index' def handle(self, index, *args, **options): reindex_fns = { 'case': get_couch_case_reindexer, 'form': get_couch_form_reindexer, 'sql-case': get_sql_case_reindexer, 'sql-form': get_sql_form_reindexer, 'case-search': get_couch_case_search_reindexer } if index not in reindex_fns: raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys()))) reindexer = reindex_fns[index]() reindexer.reindex()
# ... existing code ... from django.core.management import BaseCommand, CommandError from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer from corehq.pillows.case_search import get_couch_case_search_reindexer from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer # ... modified code ... 'form': get_couch_form_reindexer, 'sql-case': get_sql_case_reindexer, 'sql-form': get_sql_form_reindexer, 'case-search': get_couch_case_search_reindexer } if index not in reindex_fns: raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys()))) # ... rest of the code ...
d7454240f4f888309a63bba07526a821962c9670
masters/master.chromium.webrtc.fyi/master_source_cfg.py
masters/master.chromium.webrtc.fyi/master_source_cfg.py
from master import gitiles_poller def Update(config, c): webrtc_repo_url = config.Master.git_server_url + '/external/webrtc/' webrtc_poller = gitiles_poller.GitilesPoller(webrtc_repo_url, project='webrtc') c['change_source'].append(webrtc_poller) samples_poller = gitiles_poller.GitilesPoller( config.Master.git_server_url + '/external/webrtc-samples', project='webrtc-samples', comparator=webrtc_poller.comparator) c['change_source'].append(samples_poller)
from master import gitiles_poller def Update(config, c): webrtc_repo_url = config.Master.git_server_url + '/external/webrtc/' webrtc_poller = gitiles_poller.GitilesPoller(webrtc_repo_url, project='webrtc') c['change_source'].append(webrtc_poller)
Remove poller for webrtc-samples repo.
WebRTC: Remove poller for webrtc-samples repo. Having multiple GittilesPollers produces blamelists with hashes from different repos, but more importantly perf dashboard links that are not valid, since the from-hash can be from the Chromium repo and the to-hash from the webrtc-samples repo. Since breakages by changes in the webrtc-samples are rare, we should just remove the poller of that repo for simplicity. What has changed reasonably recently is that we download a prebuilt AppRTC which is pinned to specific revision and rolled manually, so if there are changes in AppRTC that are breaking Chromium, we still wouldn't detect them anymore. This solves one of the problems in crbug.com/491520 but more work is needed. [email protected] BUG=491520 Review URL: https://codereview.chromium.org/1754243003 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299053 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
python
## Code Before: from master import gitiles_poller def Update(config, c): webrtc_repo_url = config.Master.git_server_url + '/external/webrtc/' webrtc_poller = gitiles_poller.GitilesPoller(webrtc_repo_url, project='webrtc') c['change_source'].append(webrtc_poller) samples_poller = gitiles_poller.GitilesPoller( config.Master.git_server_url + '/external/webrtc-samples', project='webrtc-samples', comparator=webrtc_poller.comparator) c['change_source'].append(samples_poller) ## Instruction: WebRTC: Remove poller for webrtc-samples repo. Having multiple GittilesPollers produces blamelists with hashes from different repos, but more importantly perf dashboard links that are not valid, since the from-hash can be from the Chromium repo and the to-hash from the webrtc-samples repo. Since breakages by changes in the webrtc-samples are rare, we should just remove the poller of that repo for simplicity. What has changed reasonably recently is that we download a prebuilt AppRTC which is pinned to specific revision and rolled manually, so if there are changes in AppRTC that are breaking Chromium, we still wouldn't detect them anymore. This solves one of the problems in crbug.com/491520 but more work is needed. [email protected] BUG=491520 Review URL: https://codereview.chromium.org/1754243003 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299053 0039d316-1c4b-4281-b951-d872f2087c98 ## Code After: from master import gitiles_poller def Update(config, c): webrtc_repo_url = config.Master.git_server_url + '/external/webrtc/' webrtc_poller = gitiles_poller.GitilesPoller(webrtc_repo_url, project='webrtc') c['change_source'].append(webrtc_poller)
// ... existing code ... webrtc_poller = gitiles_poller.GitilesPoller(webrtc_repo_url, project='webrtc') c['change_source'].append(webrtc_poller) // ... rest of the code ...
180d8d721d321609f18eed9f44d59d32f474dc13
project_fish/whats_fresh/tests/test_products_model.py
project_fish/whats_fresh/tests/test_products_model.py
from django.test import TestCase from django.conf import settings from phonenumber_field.modelfields import PhoneNumberField from whats_fresh.models import * from django.contrib.gis.db import models import os import time import sys import datetime class ProductTestCase(TestCase): def setUp(self): self.expected_fields = { 'name': models.TextField, 'variety': models.TextField, 'alt_name': models.TextField, 'description': models.TextField, 'origin': models.TextField, 'season': models.TextField, 'available': models.BooleanField, 'market_price': models.TextField, 'link': models.TextField, 'image_id': models.FloatField, 'stories_id': models.FloatField, 'created': models.DateTimeField, 'modified': models.DateTimeField, 'id': models.AutoField } def test_fields_exist(self): model = models.get_model('whats_fresh', 'Product') for field, field_type in self.expected_fields.items(): self.assertEqual( field_type, type(model._meta.get_field_by_name(field)[0])) def test_no_additional_fields(self): fields = Product._meta.get_all_field_names() self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys())) def test_created_modified_fields(self): self.assertTrue(Product._meta.get_field('modified').auto_now) self.assertTrue(Product._meta.get_field('created').auto_now_add)
from django.test import TestCase from django.conf import settings from phonenumber_field.modelfields import PhoneNumberField from whats_fresh.models import * from django.contrib.gis.db import models import os import time import sys import datetime class ProductTestCase(TestCase): def setUp(self): self.expected_fields = { 'name': models.TextField, 'variety': models.TextField, 'alt_name': models.TextField, 'description': models.TextField, 'origin': models.TextField, 'season': models.TextField, 'available': models.NullBooleanField, 'market_price': models.TextField, 'link': models.TextField, 'image_id': models.ForeignKey, 'story_id': models.ForeignKey, 'created': models.DateTimeField, 'modified': models.DateTimeField, 'id': models.AutoField } def test_fields_exist(self): model = models.get_model('whats_fresh', 'Product') for field, field_type in self.expected_fields.items(): self.assertEqual( field_type, type(model._meta.get_field_by_name(field)[0])) def test_no_additional_fields(self): fields = Product._meta.get_all_field_names() self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys())) def test_created_modified_fields(self): self.assertTrue(Product._meta.get_field('modified').auto_now) self.assertTrue(Product._meta.get_field('created').auto_now_add)
Use ForeignKey for foreign keys and NullBooleanField in tests
Use ForeignKey for foreign keys and NullBooleanField in tests
Python
apache-2.0
osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
python
## Code Before: from django.test import TestCase from django.conf import settings from phonenumber_field.modelfields import PhoneNumberField from whats_fresh.models import * from django.contrib.gis.db import models import os import time import sys import datetime class ProductTestCase(TestCase): def setUp(self): self.expected_fields = { 'name': models.TextField, 'variety': models.TextField, 'alt_name': models.TextField, 'description': models.TextField, 'origin': models.TextField, 'season': models.TextField, 'available': models.BooleanField, 'market_price': models.TextField, 'link': models.TextField, 'image_id': models.FloatField, 'stories_id': models.FloatField, 'created': models.DateTimeField, 'modified': models.DateTimeField, 'id': models.AutoField } def test_fields_exist(self): model = models.get_model('whats_fresh', 'Product') for field, field_type in self.expected_fields.items(): self.assertEqual( field_type, type(model._meta.get_field_by_name(field)[0])) def test_no_additional_fields(self): fields = Product._meta.get_all_field_names() self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys())) def test_created_modified_fields(self): self.assertTrue(Product._meta.get_field('modified').auto_now) self.assertTrue(Product._meta.get_field('created').auto_now_add) ## Instruction: Use ForeignKey for foreign keys and NullBooleanField in tests ## Code After: from django.test import TestCase from django.conf import settings from phonenumber_field.modelfields import PhoneNumberField from whats_fresh.models import * from django.contrib.gis.db import models import os import time import sys import datetime class ProductTestCase(TestCase): def setUp(self): self.expected_fields = { 'name': models.TextField, 'variety': models.TextField, 'alt_name': models.TextField, 'description': models.TextField, 'origin': models.TextField, 'season': models.TextField, 'available': models.NullBooleanField, 'market_price': models.TextField, 'link': models.TextField, 'image_id': models.ForeignKey, 'story_id': models.ForeignKey, 'created': models.DateTimeField, 'modified': models.DateTimeField, 'id': models.AutoField } def test_fields_exist(self): model = models.get_model('whats_fresh', 'Product') for field, field_type in self.expected_fields.items(): self.assertEqual( field_type, type(model._meta.get_field_by_name(field)[0])) def test_no_additional_fields(self): fields = Product._meta.get_all_field_names() self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys())) def test_created_modified_fields(self): self.assertTrue(Product._meta.get_field('modified').auto_now) self.assertTrue(Product._meta.get_field('created').auto_now_add)
# ... existing code ... 'description': models.TextField, 'origin': models.TextField, 'season': models.TextField, 'available': models.NullBooleanField, 'market_price': models.TextField, 'link': models.TextField, 'image_id': models.ForeignKey, 'story_id': models.ForeignKey, 'created': models.DateTimeField, 'modified': models.DateTimeField, 'id': models.AutoField # ... rest of the code ...
254dfe87df2ae2897e368c3ede10df0f6700a07d
bundles/core/org.openhab.core/src/main/java/org/openhab/core/types/UnDefType.java
bundles/core/org.openhab.core/src/main/java/org/openhab/core/types/UnDefType.java
/** * Copyright (c) 2010-2015, openHAB.org and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.core.types; /** * There are situations when item states do not have any defined value. * This might be because they have not been initialized yet (never * received an state update so far) or because their state is ambiguous * (e.g. a dimmed light that is treated as a switch (ON/OFF) will have * an undefined state if it is dimmed to 50%). * * @author Kai Kreuzer * @since 0.1.0 * */ public enum UnDefType implements PrimitiveType, State { UNDEF, NULL; public String format(String pattern) { return String.format(pattern, this.toString()); } }
/** * Copyright (c) 2010-2015, openHAB.org and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.core.types; /** * There are situations when item states do not have any defined value. * This might be because they have not been initialized yet (never * received an state update so far) or because their state is ambiguous * (e.g. a dimmed light that is treated as a switch (ON/OFF) will have * an undefined state if it is dimmed to 50%). * * @author Kai Kreuzer * @since 0.1.0 * */ public enum UnDefType implements PrimitiveType, State { UNDEF, NULL; public String toString() { switch(this) { case UNDEF: return "Undefined"; case NULL: return "Uninitialized"; } return ""; } public String format(String pattern) { return String.format(pattern, this.toString()); } }
Revert "remove the special toString() in order to sync with ESH"
Revert "remove the special toString() in order to sync with ESH"
Java
epl-1.0
aruder77/openhab,dominicdesu/openhab,berndpfrommer/openhab,dmize/openhab,tomtrath/openhab,berndpfrommer/openhab,tomtrath/openhab,openhab/openhab,joek/openhab1-addons,hmerk/openhab,cdjackson/openhab,kreutpet/openhab,paixaop/openhab,kaikreuzer/openhab,ssalonen/openhab,netwolfuk/openhab,paixaop/openhab,openhab/openhab,steve-bate/openhab,TheOriginalAndrobot/openhab,coolweb/openhab,tomtrath/openhab,sibbi77/openhab,sedstef/openhab,watou/openhab,RafalLukawiecki/openhab1-addons,beowulfe/openhab,SwissKid/openhab,wuellueb/openhab,swatchy2dot0/openhab,hemantsangwan/openhab,mvolaart/openhab,teichsta/openhab,teichsta/openhab,netwolfuk/openhab,swatchy2dot0/openhab,sibbi77/openhab,seebag/openhab,QuailAutomation/openhab,andreasgebauer/openhab,taupinfada/openhab,wep4you/openhab,joek/openhab1-addons,svenschreier/openhab,ivanfmartinez/openhab,gerrieg/openhab,docbender/openhab,bakrus/openhab,lewie/openhab,idserda/openhab,jowiho/openhab,bbesser/openhab1-addons,aruder77/openhab,beowulfe/openhab,paphko/openhab,joek/openhab1-addons,dominicdesu/openhab,sumnerboy12/openhab,steintore/openhab,dbadia/openhab,evansj/openhab,falkena/openhab,ivanfmartinez/openhab,netwolfuk/openhab,paolodenti/openhab,dominicdesu/openhab,dvanherbergen/openhab,bakrus/openhab,steintore/openhab,hmerk/openhab,LaurensVanAcker/openhab,robnielsen/openhab,svenschaefer74/openhab,TheNetStriker/openhab,juri8/openhab,dmize/openhab,hmerk/openhab,svenschaefer74/openhab,computergeek1507/openhab,kaikreuzer/openhab,juri8/openhab,dvanherbergen/openhab,juri8/openhab,paixaop/openhab,watou/openhab,taupinfada/openhab,ivanfmartinez/openhab,cdjackson/openhab,dominicdesu/openhab,wuellueb/openhab,savageautomate/openhab,basriram/openhab,rmayr/openhab,seebag/openhab,LaurensVanAcker/openhab,dmize/openhab,cvanorman/openhab,mvolaart/openhab,druciak/openhab,Snickermicker/openhab,TheOriginalAndrobot/openhab,falkena/openhab,vgoldman/openhab,wuellueb/openhab,watou/openhab,rmayr/openhab,swatchy2dot0/openhab,vgoldman/openhab,steintore/openhab,docbender/openhab,hemantsangwan/openhab,doubled-ca/openhab1-addons,vgoldman/openhab,dbadia/openhab,dbadia/openhab,aschor/openhab,lewie/openhab,sytone/openhab,seebag/openhab,openhab/openhab,coolweb/openhab,gerrieg/openhab,jowiho/openhab,dodger777/openhabMod,aschor/openhab,cschneider/openhab,Mixajlo/openhab,robnielsen/openhab,jowiho/openhab,rmayr/openhab,tdiekmann/openhab,idserda/openhab,SwissKid/openhab,TheOriginalAndrobot/openhab,QuailAutomation/openhab,bbesser/openhab1-addons,coolweb/openhab,TheNetStriker/openhab,cschneider/openhab,SwissKid/openhab,watou/openhab,aruder77/openhab,swatchy2dot0/openhab,svenschaefer74/openhab,kuijp/openhab,falkena/openhab,robnielsen/openhab,aruder77/openhab,ollie-dev/openhab,ollie-dev/openhab,ivanfmartinez/openhab,tarioch/openhab,QuailAutomation/openhab,paphko/openhab,paolodenti/openhab,cdjackson/openhab,berndpfrommer/openhab,dvanherbergen/openhab,Snickermicker/openhab,tomtrath/openhab,bakrus/openhab,evansj/openhab,querdenker2k/openhab,frami/openhab,Mixajlo/openhab,netwolfuk/openhab,theoweiss/openhab,svenschaefer74/openhab,ssalonen/openhab,sytone/openhab,bbesser/openhab1-addons,Snickermicker/openhab,querdenker2k/openhab,mrguessed/openhab,tarioch/openhab,dodger777/openhabMod,cschneider/openhab,swatchy2dot0/openhab,tdiekmann/openhab,seebag/openhab,coolweb/openhab,Mixajlo/openhab,computergeek1507/openhab,doubled-ca/openhab1-addons,RafalLukawiecki/openhab1-addons,steve-bate/openhab,RafalLukawiecki/openhab1-addons,svenschaefer74/openhab,sytone/openhab,bakrus/openhab,lewie/openhab,dmize/openhab,robbyb67/openhab,computergeek1507/openhab,Mixajlo/openhab,svenschreier/openhab,falkena/openhab,bakrus/openhab,robbyb67/openhab,revenz/openhab,kaikreuzer/openhab,tomtrath/openhab,teichsta/openhab,aschor/openhab,teichsta/openhab,idserda/openhab,docbender/openhab,hemantsangwan/openhab,savageautomate/openhab,CrackerStealth/openhab,paolodenti/openhab,sedstef/openhab,netwolfuk/openhab,joek/openhab1-addons,RafalLukawiecki/openhab1-addons,beowulfe/openhab,vgoldman/openhab,TheOriginalAndrobot/openhab,sumnerboy12/openhab,Snickermicker/openhab,SwissKid/openhab,teichsta/openhab,basriram/openhab,doubled-ca/openhab1-addons,tomtrath/openhab,teichsta/openhab,druciak/openhab,wuellueb/openhab,TheNetStriker/openhab,kaikreuzer/openhab,andreasgebauer/openhab,kaikreuzer/openhab,docbender/openhab,hmerk/openhab,querdenker2k/openhab,hmerk/openhab,wep4you/openhab,vgoldman/openhab,kuijp/openhab,paolodenti/openhab,paphko/openhab,cvanorman/openhab,taupinfada/openhab,mvolaart/openhab,evansj/openhab,cvanorman/openhab,tarioch/openhab,revenz/openhab,revenz/openhab,andreasgebauer/openhab,mvolaart/openhab,wep4you/openhab,computergeek1507/openhab,querdenker2k/openhab,druciak/openhab,theoweiss/openhab,falkena/openhab,bakrus/openhab,sytone/openhab,SwissKid/openhab,CrackerStealth/openhab,juri8/openhab,andreasgebauer/openhab,teichsta/openhab,tdiekmann/openhab,rmayr/openhab,cvanorman/openhab,mrguessed/openhab,mvolaart/openhab,evansj/openhab,sedstef/openhab,cdjackson/openhab,cschneider/openhab,Mixajlo/openhab,sedstef/openhab,rmayr/openhab,swatchy2dot0/openhab,beowulfe/openhab,hemantsangwan/openhab,jowiho/openhab,beowulfe/openhab,dbadia/openhab,robbyb67/openhab,druciak/openhab,docbender/openhab,cschneider/openhab,LaurensVanAcker/openhab,steve-bate/openhab,hemantsangwan/openhab,doubled-ca/openhab1-addons,tdiekmann/openhab,RafalLukawiecki/openhab1-addons,openhab/openhab,robbyb67/openhab,lewie/openhab,TheNetStriker/openhab,watou/openhab,kreutpet/openhab,frami/openhab,LaurensVanAcker/openhab,idserda/openhab,gerrieg/openhab,aruder77/openhab,theoweiss/openhab,sumnerboy12/openhab,jowiho/openhab,joek/openhab1-addons,revenz/openhab,kreutpet/openhab,QuailAutomation/openhab,dodger777/openhabMod,netwolfuk/openhab,dodger777/openhabMod,seebag/openhab,sibbi77/openhab,cschneider/openhab,ssalonen/openhab,LaurensVanAcker/openhab,vgoldman/openhab,juri8/openhab,juri8/openhab,computergeek1507/openhab,revenz/openhab,paixaop/openhab,kaikreuzer/openhab,ssalonen/openhab,kreutpet/openhab,paolodenti/openhab,paphko/openhab,openhab/openhab,dmize/openhab,bakrus/openhab,bbesser/openhab1-addons,aschor/openhab,mrguessed/openhab,idserda/openhab,berndpfrommer/openhab,ollie-dev/openhab,robnielsen/openhab,dodger777/openhabMod,dodger777/openhabMod,docbender/openhab,kuijp/openhab,mrguessed/openhab,berndpfrommer/openhab,TheNetStriker/openhab,sedstef/openhab,sibbi77/openhab,cvanorman/openhab,tarioch/openhab,bakrus/openhab,ollie-dev/openhab,beowulfe/openhab,coolweb/openhab,tdiekmann/openhab,gerrieg/openhab,savageautomate/openhab,SwissKid/openhab,mrguessed/openhab,frami/openhab,savageautomate/openhab,steintore/openhab,lewie/openhab,CrackerStealth/openhab,rmayr/openhab,revenz/openhab,basriram/openhab,Snickermicker/openhab,watou/openhab,savageautomate/openhab,theoweiss/openhab,gerrieg/openhab,Mixajlo/openhab,teichsta/openhab,TheOriginalAndrobot/openhab,sibbi77/openhab,dvanherbergen/openhab,coolweb/openhab,kuijp/openhab,aschor/openhab,falkena/openhab,QuailAutomation/openhab,paixaop/openhab,juri8/openhab,tomtrath/openhab,andreasgebauer/openhab,tomtrath/openhab,wep4you/openhab,tdiekmann/openhab,evansj/openhab,kuijp/openhab,ollie-dev/openhab,cvanorman/openhab,sedstef/openhab,savageautomate/openhab,bbesser/openhab1-addons,sytone/openhab,CrackerStealth/openhab,robnielsen/openhab,TheOriginalAndrobot/openhab,svenschreier/openhab,CrackerStealth/openhab,svenschreier/openhab,RafalLukawiecki/openhab1-addons,theoweiss/openhab,steve-bate/openhab,robbyb67/openhab,lewie/openhab,docbender/openhab,hemantsangwan/openhab,andreasgebauer/openhab,Snickermicker/openhab,ssalonen/openhab,wep4you/openhab,coolweb/openhab,joek/openhab1-addons,svenschreier/openhab,sumnerboy12/openhab,svenschaefer74/openhab,robbyb67/openhab,ollie-dev/openhab,dvanherbergen/openhab,steintore/openhab,sytone/openhab,steve-bate/openhab,ivanfmartinez/openhab,dvanherbergen/openhab,paphko/openhab,basriram/openhab,frami/openhab,andreasgebauer/openhab,robbyb67/openhab,paolodenti/openhab,steve-bate/openhab,ivanfmartinez/openhab,dodger777/openhabMod,mrguessed/openhab,paixaop/openhab,jowiho/openhab,paixaop/openhab,frami/openhab,taupinfada/openhab,kreutpet/openhab,wuellueb/openhab,CrackerStealth/openhab,dbadia/openhab,taupinfada/openhab,bbesser/openhab1-addons,dominicdesu/openhab,lewie/openhab,querdenker2k/openhab,hemantsangwan/openhab,doubled-ca/openhab1-addons,TheNetStriker/openhab,LaurensVanAcker/openhab,druciak/openhab,sumnerboy12/openhab,druciak/openhab,cdjackson/openhab,wuellueb/openhab,dbadia/openhab,basriram/openhab,idserda/openhab,evansj/openhab,frami/openhab,sumnerboy12/openhab,aschor/openhab,computergeek1507/openhab,wep4you/openhab,basriram/openhab,kreutpet/openhab,mvolaart/openhab,kuijp/openhab,berndpfrommer/openhab,robnielsen/openhab,doubled-ca/openhab1-addons,paphko/openhab,openhab/openhab,theoweiss/openhab,QuailAutomation/openhab,gerrieg/openhab,hmerk/openhab,TheNetStriker/openhab,dodger777/openhabMod,seebag/openhab,dmize/openhab,svenschreier/openhab,paixaop/openhab,cschneider/openhab,robbyb67/openhab
java
## Code Before: /** * Copyright (c) 2010-2015, openHAB.org and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.core.types; /** * There are situations when item states do not have any defined value. * This might be because they have not been initialized yet (never * received an state update so far) or because their state is ambiguous * (e.g. a dimmed light that is treated as a switch (ON/OFF) will have * an undefined state if it is dimmed to 50%). * * @author Kai Kreuzer * @since 0.1.0 * */ public enum UnDefType implements PrimitiveType, State { UNDEF, NULL; public String format(String pattern) { return String.format(pattern, this.toString()); } } ## Instruction: Revert "remove the special toString() in order to sync with ESH" ## Code After: /** * Copyright (c) 2010-2015, openHAB.org and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.core.types; /** * There are situations when item states do not have any defined value. * This might be because they have not been initialized yet (never * received an state update so far) or because their state is ambiguous * (e.g. a dimmed light that is treated as a switch (ON/OFF) will have * an undefined state if it is dimmed to 50%). * * @author Kai Kreuzer * @since 0.1.0 * */ public enum UnDefType implements PrimitiveType, State { UNDEF, NULL; public String toString() { switch(this) { case UNDEF: return "Undefined"; case NULL: return "Uninitialized"; } return ""; } public String format(String pattern) { return String.format(pattern, this.toString()); } }
// ... existing code ... public enum UnDefType implements PrimitiveType, State { UNDEF, NULL; public String toString() { switch(this) { case UNDEF: return "Undefined"; case NULL: return "Uninitialized"; } return ""; } public String format(String pattern) { return String.format(pattern, this.toString()); } // ... rest of the code ...
3602759b633f0643979c8f0970e088f29644b758
icekit/plugins/brightcove/models.py
icekit/plugins/brightcove/models.py
from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from fluent_contents.models import ContentItem try: from django_brightcove.fields import BrightcoveField except ImportError: raise NotImplementedError( _( 'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.' ) ) @python_2_unicode_compatible class BrightcoveItem(ContentItem): """ Media from brightcove. Brightcove is a video editing and management product which can be found at http://brightcove.com/. They have in built APIs and players. The BrightcoveField is a django specific implementation to allow the embedding of videos. It anticipates the video ID will be used as a lookup value. In the template to be rendered you will need to include: <script type="text/javascript" src="http://admin.brightcove.com/js/BrightcoveExperiences.js" > </script> """ video = BrightcoveField( help_text=_('Provide the video ID from the brightcove video.') ) class Meta: verbose_name = _('Brightcove Video') verbose_name_plural = _('Brightcove Videos') def __str__(self): return str(self.video)
from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from fluent_contents.models import ContentItem try: from django_brightcove.fields import BrightcoveField except ImportError: raise NotImplementedError( _( 'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.' ) ) @python_2_unicode_compatible class BrightcoveItem(ContentItem): """ Media from brightcove. Brightcove is a video editing and management product which can be found at http://brightcove.com/. They have in built APIs and players. The BrightcoveField is a django specific implementation to allow the embedding of videos. It anticipates the video ID will be used as a lookup value. """ video = BrightcoveField( help_text=_('Provide the video ID from the brightcove video.') ) class Meta: verbose_name = _('Brightcove Video') verbose_name_plural = _('Brightcove Videos') def __str__(self): return str(self.video)
Remove comment as media addition automatically happens.
Remove comment as media addition automatically happens.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
python
## Code Before: from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from fluent_contents.models import ContentItem try: from django_brightcove.fields import BrightcoveField except ImportError: raise NotImplementedError( _( 'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.' ) ) @python_2_unicode_compatible class BrightcoveItem(ContentItem): """ Media from brightcove. Brightcove is a video editing and management product which can be found at http://brightcove.com/. They have in built APIs and players. The BrightcoveField is a django specific implementation to allow the embedding of videos. It anticipates the video ID will be used as a lookup value. In the template to be rendered you will need to include: <script type="text/javascript" src="http://admin.brightcove.com/js/BrightcoveExperiences.js" > </script> """ video = BrightcoveField( help_text=_('Provide the video ID from the brightcove video.') ) class Meta: verbose_name = _('Brightcove Video') verbose_name_plural = _('Brightcove Videos') def __str__(self): return str(self.video) ## Instruction: Remove comment as media addition automatically happens. ## Code After: from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from fluent_contents.models import ContentItem try: from django_brightcove.fields import BrightcoveField except ImportError: raise NotImplementedError( _( 'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.' ) ) @python_2_unicode_compatible class BrightcoveItem(ContentItem): """ Media from brightcove. Brightcove is a video editing and management product which can be found at http://brightcove.com/. They have in built APIs and players. The BrightcoveField is a django specific implementation to allow the embedding of videos. It anticipates the video ID will be used as a lookup value. """ video = BrightcoveField( help_text=_('Provide the video ID from the brightcove video.') ) class Meta: verbose_name = _('Brightcove Video') verbose_name_plural = _('Brightcove Videos') def __str__(self): return str(self.video)
# ... existing code ... The BrightcoveField is a django specific implementation to allow the embedding of videos. It anticipates the video ID will be used as a lookup value. """ video = BrightcoveField( help_text=_('Provide the video ID from the brightcove video.') # ... rest of the code ...
e91efeab9668f1d9a15f44b97c30228ecf7450c4
rabix-executor/src/main/java/org/rabix/executor/pathmapper/local/LocalPathMapper.java
rabix-executor/src/main/java/org/rabix/executor/pathmapper/local/LocalPathMapper.java
package org.rabix.executor.pathmapper.local; import java.io.File; import java.io.IOException; import java.util.Map; import org.rabix.bindings.mapper.FileMappingException; import org.rabix.bindings.mapper.FilePathMapper; import org.rabix.executor.config.StorageConfiguration; import com.google.inject.Inject; public class LocalPathMapper implements FilePathMapper { private final StorageConfiguration storageConfig; @Inject public LocalPathMapper(final StorageConfiguration storageConfig) { this.storageConfig = storageConfig; } @Override public String map(String path, Map<String, Object> config) throws FileMappingException { if (!path.startsWith(File.separator)) { try { return new File(storageConfig.getPhysicalExecutionBaseDir(), path).getCanonicalPath(); } catch (IOException e) { throw new FileMappingException(e); } } return path; } }
package org.rabix.executor.pathmapper.local; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.util.Map; import org.rabix.bindings.mapper.FileMappingException; import org.rabix.bindings.mapper.FilePathMapper; import org.rabix.executor.config.StorageConfiguration; import com.google.inject.Inject; public class LocalPathMapper implements FilePathMapper { private final StorageConfiguration storageConfig; @Inject public LocalPathMapper(final StorageConfiguration storageConfig) { this.storageConfig = storageConfig; } @Override public String map(String path, Map<String, Object> config) throws FileMappingException { if (!Paths.get(path).isAbsolute()) { try { return new File(storageConfig.getPhysicalExecutionBaseDir(), path).getCanonicalPath(); } catch (IOException e) { throw new FileMappingException(e); } } return path; } }
Fix issue with Windows paths
Fix issue with Windows paths
Java
apache-2.0
rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny
java
## Code Before: package org.rabix.executor.pathmapper.local; import java.io.File; import java.io.IOException; import java.util.Map; import org.rabix.bindings.mapper.FileMappingException; import org.rabix.bindings.mapper.FilePathMapper; import org.rabix.executor.config.StorageConfiguration; import com.google.inject.Inject; public class LocalPathMapper implements FilePathMapper { private final StorageConfiguration storageConfig; @Inject public LocalPathMapper(final StorageConfiguration storageConfig) { this.storageConfig = storageConfig; } @Override public String map(String path, Map<String, Object> config) throws FileMappingException { if (!path.startsWith(File.separator)) { try { return new File(storageConfig.getPhysicalExecutionBaseDir(), path).getCanonicalPath(); } catch (IOException e) { throw new FileMappingException(e); } } return path; } } ## Instruction: Fix issue with Windows paths ## Code After: package org.rabix.executor.pathmapper.local; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.util.Map; import org.rabix.bindings.mapper.FileMappingException; import org.rabix.bindings.mapper.FilePathMapper; import org.rabix.executor.config.StorageConfiguration; import com.google.inject.Inject; public class LocalPathMapper implements FilePathMapper { private final StorageConfiguration storageConfig; @Inject public LocalPathMapper(final StorageConfiguration storageConfig) { this.storageConfig = storageConfig; } @Override public String map(String path, Map<String, Object> config) throws FileMappingException { if (!Paths.get(path).isAbsolute()) { try { return new File(storageConfig.getPhysicalExecutionBaseDir(), path).getCanonicalPath(); } catch (IOException e) { throw new FileMappingException(e); } } return path; } }
... import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.util.Map; import org.rabix.bindings.mapper.FileMappingException; ... @Override public String map(String path, Map<String, Object> config) throws FileMappingException { if (!Paths.get(path).isAbsolute()) { try { return new File(storageConfig.getPhysicalExecutionBaseDir(), path).getCanonicalPath(); } catch (IOException e) { ...
7c5061e4fbf0737ce07f13cb9102cdbbacf73115
pyethapp/tests/test_genesis.py
pyethapp/tests/test_genesis.py
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES


def check_genesis(profile):
    config = dict(eth=dict())

    # Set config values based on profile selection
    merge_dict(config, PROFILES[profile])

    # Load genesis config
    update_config_from_genesis_json(config, config['eth']['genesis'])

    konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})

    print config['eth'].keys()
    bc = config['eth']['block']
    print bc.keys()
    env = Env(DB(), bc)

    genesis = blocks.genesis(env)
    print 'genesis.hash', genesis.hash.encode('hex')
    print 'expected', config['eth']['genesis_hash']
    assert genesis.hash == config['eth']['genesis_hash'].decode('hex')


@pytest.mark.xfail  # FIXME
def test_olympic():
    check_genesis('olympic')


def test_frontier():
    check_genesis('frontier')


if __name__ == '__main__':
    test_genesis()
from pprint import pprint

import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES


@pytest.mark.parametrize('profile', PROFILES.keys())
def test_profile(profile):
    config = dict(eth=dict())
    konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})

    # Set config values based on profile selection
    merge_dict(config, PROFILES[profile])

    # Load genesis config
    update_config_from_genesis_json(config, config['eth']['genesis'])

    bc = config['eth']['block']
    pprint(bc)
    env = Env(DB(), bc)

    genesis = blocks.genesis(env)
    assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
Fix & cleanup profile genesis tests
Fix & cleanup profile genesis tests
Python
mit
ethereum/pyethapp,gsalgado/pyethapp,gsalgado/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp
python
## Code Before: import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES def check_genesis(profile): config = dict(eth=dict()) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) print config['eth'].keys() bc = config['eth']['block'] print bc.keys() env = Env(DB(), bc) genesis = blocks.genesis(env) print 'genesis.hash', genesis.hash.encode('hex') print 'expected', config['eth']['genesis_hash'] assert genesis.hash == config['eth']['genesis_hash'].decode('hex') @pytest.mark.xfail # FIXME def test_olympic(): check_genesis('olympic') def test_frontier(): check_genesis('frontier') if __name__ == '__main__': test_genesis() ## Instruction: Fix & cleanup profile genesis tests ## Code After: from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES @pytest.mark.parametrize('profile', PROFILES.keys()) def test_profile(profile): config = dict(eth=dict()) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) bc = config['eth']['block'] pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
... from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB ... from pyethapp.profiles import PROFILES @pytest.mark.parametrize('profile', PROFILES.keys()) def test_profile(profile): config = dict(eth=dict()) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) ... # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) bc = config['eth']['block'] pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) assert genesis.hash.encode('hex') == config['eth']['genesis_hash'] ...
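The rewrite above collapses two near-identical tests into a single parametrized one. Below is a minimal, self-contained sketch of the same pytest pattern; the profile dict and the assertion are stand-ins, not pyethapp's real genesis config:

```python
import pytest

PROFILES = {"olympic": {"network_id": 0}, "frontier": {"network_id": 1}}

# pytest generates one test case per profile name and reports each separately.
@pytest.mark.parametrize("profile", PROFILES.keys())
def test_profile(profile):
    config = dict(PROFILES[profile])   # stand-in for building the genesis config
    assert "network_id" in config
```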
5b59c10c035735ee9391f38b1f75358c767020d7
src/main/java/com/grayben/riskExtractor/htmlScorer/ScoredText.java
src/main/java/com/grayben/riskExtractor/htmlScorer/ScoredText.java
package com.grayben.riskExtractor.htmlScorer;

import java.util.ArrayList;
import java.util.List;

public class ScoredText {

    private List<ScoredTextElement> text;

    public ScoredText() {
        super();
        text = new ArrayList<>();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (ScoredTextElement element : text) {
            sb.append(element.getTextElement());
            sb.append(" ");
        }
        return sb.toString().trim();
    }

    public void add(ScoredTextElement st) {
        if(st.getScores() == null){
            throw new NullPointerException(
                    "param.getScores() must not be null"
            );
        }
        if(st.getScores().isEmpty()){
            throw new IllegalArgumentException(
                    "param.getScores() must not be empty"
            );
        }
        if(st.getTextElement() == null){
            throw new NullPointerException(
                    "param.getTextElement() must not be null"
            );
        }
        if(st.getTextElement().isEmpty()){
            throw new IllegalArgumentException(
                    "param.getTextElement() must not be empty"
            );
        }
        text.add(st);
    }
}
package com.grayben.riskExtractor.htmlScorer;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ScoredText {

    private List<ScoredTextElement> text;

    public ScoredText() {
        super();
        text = new ArrayList<>();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (ScoredTextElement element : text) {
            sb.append(element.getTextElement());
            sb.append(" ");
        }
        return sb.toString().trim();
    }

    public void add(ScoredTextElement st) {
        if(st.getScores() == null){
            throw new NullPointerException(
                    "param.getScores() must not be null"
            );
        }
        if(st.getScores().isEmpty()){
            throw new IllegalArgumentException(
                    "param.getScores() must not be empty"
            );
        }
        if(st.getTextElement() == null){
            throw new NullPointerException(
                    "param.getTextElement() must not be null"
            );
        }
        if(st.getTextElement().isEmpty()){
            throw new IllegalArgumentException(
                    "param.getTextElement() must not be empty"
            );
        }
        text.add(st);
    }

    public List<ScoredTextElement> getList(){
        return Collections.unmodifiableList(this.text);
    }
}
Add access to list of elements
Add access to list of elements
Java
mit
grayben/10K-item-extractor,grayben/10K-item-extractor
java
## Code Before: package com.grayben.riskExtractor.htmlScorer; import java.util.ArrayList; import java.util.List; public class ScoredText { private List<ScoredTextElement> text; public ScoredText() { super(); text = new ArrayList<>(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); for (ScoredTextElement element : text) { sb.append(element.getTextElement()); sb.append(" "); } return sb.toString().trim(); } public void add(ScoredTextElement st) { if(st.getScores() == null){ throw new NullPointerException( "param.getScores() must not be null" ); } if(st.getScores().isEmpty()){ throw new IllegalArgumentException( "param.getScores() must not be empty" ); } if(st.getTextElement() == null){ throw new NullPointerException( "param.getTextElement() must not be null" ); } if(st.getTextElement().isEmpty()){ throw new IllegalArgumentException( "param.getTextElement() must not be empty" ); } text.add(st); } } ## Instruction: Add access to list of elements ## Code After: package com.grayben.riskExtractor.htmlScorer; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class ScoredText { private List<ScoredTextElement> text; public ScoredText() { super(); text = new ArrayList<>(); } @Override public String toString() { StringBuilder sb = new StringBuilder(); for (ScoredTextElement element : text) { sb.append(element.getTextElement()); sb.append(" "); } return sb.toString().trim(); } public void add(ScoredTextElement st) { if(st.getScores() == null){ throw new NullPointerException( "param.getScores() must not be null" ); } if(st.getScores().isEmpty()){ throw new IllegalArgumentException( "param.getScores() must not be empty" ); } if(st.getTextElement() == null){ throw new NullPointerException( "param.getTextElement() must not be null" ); } if(st.getTextElement().isEmpty()){ throw new IllegalArgumentException( "param.getTextElement() must not be empty" ); } text.add(st); } public List<ScoredTextElement> getList(){ return Collections.unmodifiableList(this.text); } }
// ... existing code ... package com.grayben.riskExtractor.htmlScorer; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class ScoredText { // ... modified code ... } text.add(st); } public List<ScoredTextElement> getList(){ return Collections.unmodifiableList(this.text); } } // ... rest of the code ...
faf0b8c401c31419a11cd8bbc640340f2c6d52da
lib/Target/X86/X86InstrBuilder.h
lib/Target/X86/X86InstrBuilder.h
//===-- X86InstrBuilder.h - Functions to aid building x86 insts -*- C++ -*-===//
//
// This file exposes functions that may be used with BuildMI from the
// MachineInstrBuilder.h file to handle X86'isms in a clean way.
//
// The BuildMem function may be used with the BuildMI function to add entire
// memory references in a single, typed, function call. X86 memory references
// can be very complex expressions (described in the README), so wrapping them
// up behind an easier to use interface makes sense. Descriptions of the
// functions are included below.
//
//===----------------------------------------------------------------------===//

#ifndef X86INSTRBUILDER_H
#define X86INSTRBUILDER_H

#include "llvm/CodeGen/MachineInstrBuilder.h"

/// addDirectMem - This function is used to add a direct memory reference to the
/// current instruction. Because memory references are always represented with
/// four values, this adds: Reg, [1, NoReg, 0] to the instruction
///
inline const MachineInstrBuilder &addDirectMem(const MachineInstrBuilder &MIB,
                                               unsigned Reg) {
  return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(0);
}

#endif
//===-- X86InstrBuilder.h - Functions to aid building x86 insts -*- C++ -*-===//
//
// This file exposes functions that may be used with BuildMI from the
// MachineInstrBuilder.h file to handle X86'isms in a clean way.
//
// The BuildMem function may be used with the BuildMI function to add entire
// memory references in a single, typed, function call. X86 memory references
// can be very complex expressions (described in the README), so wrapping them
// up behind an easier to use interface makes sense. Descriptions of the
// functions are included below.
//
//===----------------------------------------------------------------------===//

#ifndef X86INSTRBUILDER_H
#define X86INSTRBUILDER_H

#include "llvm/CodeGen/MachineInstrBuilder.h"

/// addDirectMem - This function is used to add a direct memory reference to the
/// current instruction. Because memory references are always represented with
/// four values, this adds: Reg, [1, NoReg, 0] to the instruction
///
inline const MachineInstrBuilder &addDirectMem(const MachineInstrBuilder &MIB,
                                               unsigned Reg) {
  return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(0);
}

/// addRegOffset -
///
///
inline const MachineInstrBuilder &addRegOffset(const MachineInstrBuilder &MIB,
                                               unsigned Reg, unsigned Offset) {
  return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(Offset);
}

#endif
Add a simple way to add memory locations of format
Add a simple way to add memory locations of format
[reg+offset]

git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@4825 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
GPUOpen-Drivers/llvm,llvm-mirror/llvm,chubbymaggie/asap,chubbymaggie/asap,llvm-mirror/llvm,apple/swift-llvm,dslab-epfl/asap,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,chubbymaggie/asap,dslab-epfl/asap,dslab-epfl/asap,apple/swift-llvm,apple/swift-llvm,chubbymaggie/asap,chubbymaggie/asap,GPUOpen-Drivers/llvm,chubbymaggie/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,dslab-epfl/asap,dslab-epfl/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,dslab-epfl/asap
c
## Code Before: //===-- X86InstrBuilder.h - Functions to aid building x86 insts -*- C++ -*-===// // // This file exposes functions that may be used with BuildMI from the // MachineInstrBuilder.h file to handle X86'isms in a clean way. // // The BuildMem function may be used with the BuildMI function to add entire // memory references in a single, typed, function call. X86 memory references // can be very complex expressions (described in the README), so wrapping them // up behind an easier to use interface makes sense. Descriptions of the // functions are included below. // //===----------------------------------------------------------------------===// #ifndef X86INSTRBUILDER_H #define X86INSTRBUILDER_H #include "llvm/CodeGen/MachineInstrBuilder.h" /// addDirectMem - This function is used to add a direct memory reference to the /// current instruction. Because memory references are always represented with /// four values, this adds: Reg, [1, NoReg, 0] to the instruction /// inline const MachineInstrBuilder &addDirectMem(const MachineInstrBuilder &MIB, unsigned Reg) { return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(0); } #endif ## Instruction: Add a simple way to add memory locations of format [reg+offset] git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@4825 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: //===-- X86InstrBuilder.h - Functions to aid building x86 insts -*- C++ -*-===// // // This file exposes functions that may be used with BuildMI from the // MachineInstrBuilder.h file to handle X86'isms in a clean way. // // The BuildMem function may be used with the BuildMI function to add entire // memory references in a single, typed, function call. X86 memory references // can be very complex expressions (described in the README), so wrapping them // up behind an easier to use interface makes sense. Descriptions of the // functions are included below. // //===----------------------------------------------------------------------===// #ifndef X86INSTRBUILDER_H #define X86INSTRBUILDER_H #include "llvm/CodeGen/MachineInstrBuilder.h" /// addDirectMem - This function is used to add a direct memory reference to the /// current instruction. Because memory references are always represented with /// four values, this adds: Reg, [1, NoReg, 0] to the instruction /// inline const MachineInstrBuilder &addDirectMem(const MachineInstrBuilder &MIB, unsigned Reg) { return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(0); } /// addRegOffset - /// /// inline const MachineInstrBuilder &addRegOffset(const MachineInstrBuilder &MIB, unsigned Reg, unsigned Offset) { return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(Offset); } #endif
... return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(0); } /// addRegOffset - /// /// inline const MachineInstrBuilder &addRegOffset(const MachineInstrBuilder &MIB, unsigned Reg, unsigned Offset) { return MIB.addReg(Reg).addZImm(1).addMReg(0).addSImm(Offset); } #endif ...
c970661c4525e0f3a9c77935ccfbef62742b18d4
csympy/__init__.py
csympy/__init__.py
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add,
        Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add,
        Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var


def test():
    import pytest, os
    return not pytest.cmdline.main(
        [os.path.dirname(os.path.abspath(__file__))])
Add test function so tests can be run from within python terminal
Add test function so tests can be run from within python terminal

import csympy
csympy.test()
Python
mit
symengine/symengine.py,bjodah/symengine.py,bjodah/symengine.py,symengine/symengine.py,symengine/symengine.py,bjodah/symengine.py
python
## Code Before: from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add, Mul, Pow, sin, cos, sqrt, function_symbol, I) from .utilities import var ## Instruction: Add test function so tests can be run from within python terminal import csympy csympy.test() ## Code After: from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add, Mul, Pow, sin, cos, sqrt, function_symbol, I) from .utilities import var def test(): import pytest, os return not pytest.cmdline.main( [os.path.dirname(os.path.abspath(__file__))])
// ... existing code ... from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add, Mul, Pow, sin, cos, sqrt, function_symbol, I) from .utilities import var def test(): import pytest, os return not pytest.cmdline.main( [os.path.dirname(os.path.abspath(__file__))]) // ... rest of the code ...
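The test() helper added above is a common self-test entry point: run pytest over the package directory and report success as a boolean. Here is a hedged sketch of the same idea against the modern pytest.main API (pytest.cmdline.main is an older spelling of the same entry point):

```python
import os

def test():
    import pytest
    # pytest exits with status 0 on success, so negating it yields True/False.
    return not pytest.main([os.path.dirname(os.path.abspath(__file__))])
```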
6a7d741da6124ec3d8607b5780608b51b7aca8ba
editorconfig/exceptions.py
editorconfig/exceptions.py
class EditorConfigError(Exception):
    """Parent class of all exceptions raised by EditorConfig"""


from ConfigParser import ParsingError as _ParsingError


class ParsingError(_ParsingError, EditorConfigError):
    """Error raised if an EditorConfig file could not be parsed"""


class PathError(ValueError, EditorConfigError):
    """Error raised if invalid filepath is specified"""


class VersionError(ValueError, EditorConfigError):
    """Error raised if invalid version number is specified"""
class EditorConfigError(Exception):
    """Parent class of all exceptions raised by EditorConfig"""


try:
    from ConfigParser import ParsingError as _ParsingError
except:
    from configparser import ParsingError as _ParsingError


class ParsingError(_ParsingError, EditorConfigError):
    """Error raised if an EditorConfig file could not be parsed"""


class PathError(ValueError, EditorConfigError):
    """Error raised if invalid filepath is specified"""


class VersionError(ValueError, EditorConfigError):
    """Error raised if invalid version number is specified"""
Fix broken ConfigParser import for Python3
Fix broken ConfigParser import for Python3
Python
bsd-2-clause
benjifisher/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,johnfraney/editorconfig-vim
python
## Code Before: class EditorConfigError(Exception): """Parent class of all exceptions raised by EditorConfig""" from ConfigParser import ParsingError as _ParsingError class ParsingError(_ParsingError, EditorConfigError): """Error raised if an EditorConfig file could not be parsed""" class PathError(ValueError, EditorConfigError): """Error raised if invalid filepath is specified""" class VersionError(ValueError, EditorConfigError): """Error raised if invalid version number is specified""" ## Instruction: Fix broken ConfigParser import for Python3 ## Code After: class EditorConfigError(Exception): """Parent class of all exceptions raised by EditorConfig""" try: from ConfigParser import ParsingError as _ParsingError except: from configparser import ParsingError as _ParsingError class ParsingError(_ParsingError, EditorConfigError): """Error raised if an EditorConfig file could not be parsed""" class PathError(ValueError, EditorConfigError): """Error raised if invalid filepath is specified""" class VersionError(ValueError, EditorConfigError): """Error raised if invalid version number is specified"""
... """Parent class of all exceptions raised by EditorConfig""" try: from ConfigParser import ParsingError as _ParsingError except: from configparser import ParsingError as _ParsingError class ParsingError(_ParsingError, EditorConfigError): ...
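The try/except import above is the standard Python 2/3 compatibility shim: try the Python 2 module name first, then fall back to the renamed Python 3 module. The same pattern standalone, catching ImportError explicitly rather than the bare except used in the patch:

```python
try:
    from ConfigParser import ParsingError   # Python 2 module name
except ImportError:
    from configparser import ParsingError   # Python 3 module name

print(ParsingError.__module__)
```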
78e746d58cd14f06edb0805422eab833492723f4
12/E12_27.java
12/E12_27.java
/*
Suppose you have a lot of files in a directory that contain words
Exercisei_j, where i and j are digits. Write a program that pads a 0
before i if i is a single digit and 0 before j if j is a single digit.
For example, the word Exercise2_1 in a file will be replaced by
Exercise02_01. Use the following command to run our program.
Use the following command to run your program.

java E12_27 *
*/

import java.io.File;

public class E12_27 {
  public static void main(String[] args) {
    for (String s: args) {
      File file = new File(s);
      if (s.matches(".*Exercise\\d_\\d.*")) {
        StringBuilder sb = new StringBuilder(s);
        int index = sb.indexOf("Exercise");
        sb.insert((index + 8), "0");
        sb.insert((index + 11), "0");
        File newName = new File(sb.toString());
        file.renameTo(newName);
      }
    }
  }
}
/*
Suppose you have a lot of files in a directory that contain words
Exercisei_j, where i and j are digits. Write a program that pads a 0
before i if i is a single digit and 0 before j if j is a single digit.
For example, the word Exercise2_1 in a file will be replaced by
Exercise02_01. Use the following command to run our program.
Use the following command to run your program.

java E12_27 *
*/

import java.io.File;

public class E12_27 {
  public static void main(String[] args) {
    for (String s: args) {
      File file = new File(s);
      StringBuilder sb = new StringBuilder(s);
      if (s.matches("Exercise\\d_\\d")) {
        sb.insert(8, "0");
        sb.insert(11, "0");
      } else if (s.matches("Exercise\\d_\\d+")) {
        sb.insert(8, "0");
      } else if (s.matches("Exercise\\d+_\\d")) {
        sb.insert(11, "0");
      }
      File newName = new File(sb.toString());
      file.renameTo(newName);
    }
  }
}
Modify solution to fit the exercise specifications more closely
Modify solution to fit the exercise specifications more closely
Java
mit
maxalthoff/intro-to-java-exercises
java
## Code Before: /* Suppose you have a lot of files in a directory that contain words Exercisei_j, where i and j are digits. Write a program that pads a 0 before i if i is a single digit and 0 before j if j is a single digit. For example, the word Exercise2_1 in a file will be replaced by Exercise02_01. Use the following command to run our program. Use the following command to run your program. java E12_27 * */ import java.io.File; public class E12_27 { public static void main(String[] args) { for (String s: args) { File file = new File(s); if (s.matches(".*Exercise\\d_\\d.*")) { StringBuilder sb = new StringBuilder(s); int index = sb.indexOf("Exercise"); sb.insert((index + 8), "0"); sb.insert((index + 11), "0"); File newName = new File(sb.toString()); file.renameTo(newName); } } } } ## Instruction: Modify solution to fit the exercise specifications more closely ## Code After: /* Suppose you have a lot of files in a directory that contain words Exercisei_j, where i and j are digits. Write a program that pads a 0 before i if i is a single digit and 0 before j if j is a single digit. For example, the word Exercise2_1 in a file will be replaced by Exercise02_01. Use the following command to run our program. Use the following command to run your program. java E12_27 * */ import java.io.File; public class E12_27 { public static void main(String[] args) { for (String s: args) { File file = new File(s); StringBuilder sb = new StringBuilder(s); if (s.matches("Exercise\\d_\\d")) { sb.insert(8, "0"); sb.insert(11, "0"); } else if (s.matches("Exercise\\d_\\d+")) { sb.insert(8, "0"); } else if (s.matches("Exercise\\d+_\\d")) { sb.insert(11, "0"); } File newName = new File(sb.toString()); file.renameTo(newName); } } }
# ... existing code ... public static void main(String[] args) { for (String s: args) { File file = new File(s); StringBuilder sb = new StringBuilder(s); if (s.matches("Exercise\\d_\\d")) { sb.insert(8, "0"); sb.insert(11, "0"); } else if (s.matches("Exercise\\d_\\d+")) { sb.insert(8, "0"); } else if (s.matches("Exercise\\d+_\\d")) { sb.insert(11, "0"); } File newName = new File(sb.toString()); file.renameTo(newName); } } } # ... rest of the code ...
60ef934e3bef7c00fc2d1823901babb665a4888f
get_study_attachments.py
get_study_attachments.py
import sys

import boto3

BUCKET_NAME = 'mitLookit'


def get_all_study_attachments(study_uuid):
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(BUCKET_NAME)
    study_files = []
    for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'):
        study_files.append(key)
    return study_files


if __name__ == '__main__':
    study_uuid = sys.argv[1]
    get_study_keys(study_uuid)
import sys

import boto3

BUCKET_NAME = 'mitLookit'


def get_all_study_attachments(study_uuid):
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(BUCKET_NAME)
    return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')


if __name__ == '__main__':
    study_uuid = sys.argv[1]
    get_study_keys(study_uuid)
Remove looping through items and appending them to a list.
Remove looping through items and appending them to a list.
Python
apache-2.0
CenterForOpenScience/lookit-api,pattisdr/lookit-api,pattisdr/lookit-api,pattisdr/lookit-api,CenterForOpenScience/lookit-api,CenterForOpenScience/lookit-api
python
## Code Before: import sys import boto3 BUCKET_NAME = 'mitLookit' def get_all_study_attachments(study_uuid): s3 = boto3.resource('s3') bucket = s3.Bucket(BUCKET_NAME) study_files = [] for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'): study_files.append(key) return study_files if __name__ == '__main__': study_uuid = sys.argv[1] get_study_keys(study_uuid) ## Instruction: Remove looping through items and appending them to list. ## Code After: import sys import boto3 BUCKET_NAME = 'mitLookit' def get_all_study_attachments(study_uuid): s3 = boto3.resource('s3') bucket = s3.Bucket(BUCKET_NAME) return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}') if __name__ == '__main__': study_uuid = sys.argv[1] get_study_keys(study_uuid)
... def get_all_study_attachments(study_uuid): s3 = boto3.resource('s3') bucket = s3.Bucket(BUCKET_NAME) return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}') if __name__ == '__main__': study_uuid = sys.argv[1] ...
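The simplification above works because bucket.objects.filter(...) already returns an iterable collection, so copying it element by element into a list added nothing. The same idea with a plain generator standing in for the S3 collection (no AWS access involved):

```python
def keys_with_prefix(keys, prefix):
    # Return the lazy filter directly; callers can iterate or materialize it as needed.
    return (k for k in keys if k.startswith(prefix))

matches = keys_with_prefix(["videoStream_abc", "thumbnail_abc"], "videoStream_")
print(list(matches))   # ['videoStream_abc']
```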
eddd7f856c7dc423c387d496a87cf5fdf941215b
helpers/visited_thread_set.py
helpers/visited_thread_set.py
class VisitedThreadSet():

    set = None

    def __init__(self):
        pass

    def load(self):
        pass

    def save(self):
        pass

    def add_thread(self):
        pass

    def check_thread_exists(self):
        pass
class VisitedThreadSet():

    set = None

    def __init__(self):
        self.set = set()

    def load_set(self):
        pass

    def save_set(self):
        pass

    def add(self, value):
        self.set.add(str(value))

    def contains(self, value):
        if str(value) in self.set:
            return True
        else:
            return False
Add value to VisitedThreadSet or check if it exists
New: Add value to VisitedThreadSet or check if it exists
Python
mit
AFFogarty/SEP-Bot,AFFogarty/SEP-Bot
python
## Code Before: class VisitedThreadSet(): set = None def __init__(self): pass def load(self): pass def save(self): pass def add_thread(self): pass def check_thread_exists(self): pass ## Instruction: New: Add value to VisitedThreadSet or check if it exists ## Code After: class VisitedThreadSet(): set = None def __init__(self): self.set = set() def load_set(self): pass def save_set(self): pass def add(self, value): self.set.add(str(value)) def contains(self, value): if str(value) in self.set: return True else: return False
// ... existing code ... set = None def __init__(self): self.set = set() def load_set(self): pass def save_set(self): pass def add(self, value): self.set.add(str(value)) def contains(self, value): if str(value) in self.set: return True else: return False // ... rest of the code ...
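The new add/contains methods above pass every value through str() before touching the underlying set, so thread identifiers compare consistently whether they arrive as ints or strings. A quick standalone check of that behaviour:

```python
seen = set()
seen.add(str(42))          # what add() stores

print(str(42) in seen)     # True  - contains() normalizes the lookup the same way
print("42" in seen)        # True
print(42 in seen)          # False - the raw int was never stored
```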
3313d611d7cc66bf607a341a5d9a6a5d96dfbec5
clowder_server/emailer.py
clowder_server/emailer.py
import os

import requests

from django.core.mail import send_mail

from clowder_account.models import ClowderUser

ADMIN_EMAIL = '[email protected]'


def send_alert(company, name):
    for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
        subject = 'FAILURE: %s' % (name)
        body = subject

        if user.company_id == 86:
            slack_token = os.getenv('PARKME_SLACK_TOKEN')
            url = 'https://hooks.slack.com/services/%s' % (slack_token)
            payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
            requests.post(url, json=payload)

        send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
import os

import requests

from django.core.mail import send_mail

from clowder_account.models import ClowderUser

ADMIN_EMAIL = '[email protected]'


def send_alert(company, name):
    slack_sent = False
    for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True):
        subject = 'FAILURE: %s' % (name)
        body = subject

        if user.company_id == 86 and not slack_sent:
            slack_token = os.getenv('PARKME_SLACK_TOKEN')
            url = 'https://hooks.slack.com/services/%s' % (slack_token)
            payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"}
            requests.post(url, json=payload)
            slack_sent = True

        send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
Rename bot and prevent channel spamming
Rename bot and prevent channel spamming
Python
agpl-3.0
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server
python
## Code Before: import os import requests from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = '[email protected]' def send_alert(company, name): for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject if user.company_id == 86: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"} requests.post(url, json=payload) send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True) ## Instruction: Rename bot and prevent channel spamming ## Code After: import os import requests from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = '[email protected]' def send_alert(company, name): slack_sent = False for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject if user.company_id == 86 and not slack_sent: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"} requests.post(url, json=payload) slack_sent = True send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
# ... existing code ... ADMIN_EMAIL = '[email protected]' def send_alert(company, name): slack_sent = False for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject if user.company_id == 86 and not slack_sent: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"} requests.post(url, json=payload) slack_sent = True send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True) # ... rest of the code ...
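The slack_sent flag above is a send-once guard: the loop still emails every subscribed user, but the Slack webhook fires at most once per alert. A minimal sketch of that pattern with the webhook call stubbed out (no real Slack URL or token involved):

```python
def send_alert(users, body, post=print):
    slack_sent = False
    for user in users:
        if user["wants_slack"] and not slack_sent:
            post("slack:", body)          # stand-in for requests.post(webhook_url, json=payload)
            slack_sent = True
        post("email", user["email"], body)

send_alert([{"email": "a@example.org", "wants_slack": True},
            {"email": "b@example.org", "wants_slack": True}], "FAILURE: nightly job")
```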
086ad04698d337ba3b4505b320e97eb1c4360e84
tests/sarray_tests.c
tests/sarray_tests.c
char *test_create_destroy() {
    char test[] = "abracadabra";
    char test2[] = "acadabra";

    SuffixArray *sarry = SuffixArray_create(test, sizeof(test));
    mu_assert(sarry, "Failed to create.");

    int at = SuffixArray_find_suffix(sarry, test2, sizeof(test2));
    mu_assert(at != -1, "Failed to find the suffix.");

    at = SuffixArray_find_suffix(sarry, "yo", sizeof("yo"));
    mu_assert(at == -1, "Should fail to find yo.");

    SuffixArray_destroy(sarry);

    return NULL;
}

char *all_tests() {
    mu_suite_start();

    mu_run_test(test_create_destroy);

    return NULL;
}

RUN_TESTS(all_tests);
static SuffixArray *sarry = NULL;
static char test[] = "abracadabra";
static char test2[] = "acadabra";

char *test_create() {
    sarry = SuffixArray_create(test, sizeof(test));
    mu_assert(sarry, "Failed to create.");

    return NULL;
}

char *test_destroy() {
    SuffixArray_destroy(sarry);

    return NULL;
}

char *test_find_suffix() {
    int at = SuffixArray_find_suffix(sarry, test2, sizeof(test2));
    mu_assert(at != -1, "Failed to find the suffix.");

    at = SuffixArray_find_suffix(sarry, "yo", sizeof("yo"));
    mu_assert(at == -1, "Should fail to find yo.");

    return NULL;
}

char *all_tests() {
    mu_suite_start();

    mu_run_test(test_create);
    mu_run_test(test_find_suffix);
    mu_run_test(test_destroy);

    return NULL;
}

RUN_TESTS(all_tests);
Clean up suffix array tests.
Clean up suffix array tests.
C
bsd-3-clause
HappyYang/liblcthw,HappyYang/liblcthw,HappyYang/liblcthw
c
## Code Before: char *test_create_destroy() { char test[] = "abracadabra"; char test2[] = "acadabra"; SuffixArray *sarry = SuffixArray_create(test, sizeof(test)); mu_assert(sarry, "Failed to create."); int at = SuffixArray_find_suffix(sarry, test2, sizeof(test2)); mu_assert(at != -1, "Failed to find the suffix."); at = SuffixArray_find_suffix(sarry, "yo", sizeof("yo")); mu_assert(at == -1, "Should fail to find yo."); SuffixArray_destroy(sarry); return NULL; } char *all_tests() { mu_suite_start(); mu_run_test(test_create_destroy); return NULL; } RUN_TESTS(all_tests); ## Instruction: Clean up suffix array tests. ## Code After: static SuffixArray *sarry = NULL; static char test[] = "abracadabra"; static char test2[] = "acadabra"; char *test_create() { sarry = SuffixArray_create(test, sizeof(test)); mu_assert(sarry, "Failed to create."); return NULL; } char *test_destroy() { SuffixArray_destroy(sarry); return NULL; } char *test_find_suffix() { int at = SuffixArray_find_suffix(sarry, test2, sizeof(test2)); mu_assert(at != -1, "Failed to find the suffix."); at = SuffixArray_find_suffix(sarry, "yo", sizeof("yo")); mu_assert(at == -1, "Should fail to find yo."); return NULL; } char *all_tests() { mu_suite_start(); mu_run_test(test_create); mu_run_test(test_find_suffix); mu_run_test(test_destroy); return NULL; } RUN_TESTS(all_tests);
# ... existing code ... static SuffixArray *sarry = NULL; static char test[] = "abracadabra"; static char test2[] = "acadabra"; char *test_create() { sarry = SuffixArray_create(test, sizeof(test)); mu_assert(sarry, "Failed to create."); return NULL; } char *test_destroy() { SuffixArray_destroy(sarry); return NULL; } char *test_find_suffix() { int at = SuffixArray_find_suffix(sarry, test2, sizeof(test2)); mu_assert(at != -1, "Failed to find the suffix."); at = SuffixArray_find_suffix(sarry, "yo", sizeof("yo")); mu_assert(at == -1, "Should fail to find yo."); return NULL; } # ... modified code ... char *all_tests() { mu_suite_start(); mu_run_test(test_create); mu_run_test(test_find_suffix); mu_run_test(test_destroy); return NULL; } # ... rest of the code ...
ae3d8fd826647c8d853247b069726a26f4ae462d
exterminal.py
exterminal.py
import sublime, sublime_plugin
import os


def wrapped_exec(self, *args, **kwargs):
    settings = sublime.load_settings("SublimeExterminal.sublime-settings")

    if settings.get('enabled') and kwargs.get('use_exterminal', True):
        wrapper = settings.get('exec_wrapper')

        try:
            shell_cmd = kwargs.get('shell_cmd', '')
            shell_cmd = wrapper % shell_cmd.replace('"','\\"')
            kwargs['shell_cmd'] = shell_cmd
        except KeyError:
            pass

        try:
            cmd = ' '.join(kwargs.get('cmd'))
            kwargs['shell_cmd'] = wrapper % cmd.replace('"','\\"')
        except KeyError:
            pass

    return self.run_cached_by_exterminal(*args, **kwargs)


def plugin_loaded():
    exec_cls = [cls for cls in sublime_plugin.window_command_classes \
                if cls.__qualname__=='ExecCommand'][0]

    if hasattr(exec_cls(None), 'run_cached_by_exterminal'):
        exec_cls.run = exec_cls.run_cached_by_exterminal
        exec_cls.run_cached_by_exterminal = None

    exec_cls.run_cached_by_exterminal = exec_cls.run
    exec_cls.run = wrapped_exec


class StartExterminalCommand(sublime_plugin.WindowCommand):
    def run(self, *args):
        settings = sublime.load_settings("SublimeExterminal.sublime-settings")
        cmd = settings.get('start_exterminal', '')
        os.popen(cmd)
import sublime, sublime_plugin
import os


def wrapped_exec(self, *args, **kwargs):
    settings = sublime.load_settings("SublimeExterminal.sublime-settings")

    if settings.get('enabled') and kwargs.get('use_exterminal', True):
        wrapper = settings.get('exec_wrapper')

        try:
            shell_cmd = kwargs.get('shell_cmd')
            shell_cmd = wrapper % shell_cmd.replace('"','\\"')
            kwargs['shell_cmd'] = shell_cmd
        except KeyError:
            pass

        try:
            cmd = ' '.join(kwargs.get('cmd'))
            kwargs['shell_cmd'] = wrapper % cmd.replace('"','\\"')
        except KeyError:
            pass

    return self.run_cached_by_exterminal(*args, **kwargs)


def plugin_loaded():
    exec_cls = [cls for cls in sublime_plugin.window_command_classes \
                if cls.__qualname__=='ExecCommand'][0]

    if hasattr(exec_cls(None), 'run_cached_by_exterminal'):
        exec_cls.run = exec_cls.run_cached_by_exterminal
        exec_cls.run_cached_by_exterminal = None

    exec_cls.run_cached_by_exterminal = exec_cls.run
    exec_cls.run = wrapped_exec


class StartExterminalCommand(sublime_plugin.WindowCommand):
    def run(self, *args):
        settings = sublime.load_settings("SublimeExterminal.sublime-settings")
        cmd = settings.get('start_exterminal', '')
        os.popen(cmd)
Fix dangling default in kwargs 'shell_cmd'
Fix dangling default in kwargs 'shell_cmd'
Python
mit
jemc/SublimeExterminal,jemc/SublimeExterminal
python
## Code Before: import sublime, sublime_plugin import os def wrapped_exec(self, *args, **kwargs): settings = sublime.load_settings("SublimeExterminal.sublime-settings") if settings.get('enabled') and kwargs.get('use_exterminal', True): wrapper = settings.get('exec_wrapper') try: shell_cmd = kwargs.get('shell_cmd', '') shell_cmd = wrapper % shell_cmd.replace('"','\\"') kwargs['shell_cmd'] = shell_cmd except KeyError: pass try: cmd = ' '.join(kwargs.get('cmd')) kwargs['shell_cmd'] = wrapper % cmd.replace('"','\\"') except KeyError: pass return self.run_cached_by_exterminal(*args, **kwargs) def plugin_loaded(): exec_cls = [cls for cls in sublime_plugin.window_command_classes \ if cls.__qualname__=='ExecCommand'][0] if hasattr(exec_cls(None), 'run_cached_by_exterminal'): exec_cls.run = exec_cls.run_cached_by_exterminal exec_cls.run_cached_by_exterminal = None exec_cls.run_cached_by_exterminal = exec_cls.run exec_cls.run = wrapped_exec class StartExterminalCommand(sublime_plugin.WindowCommand): def run(self, *args): settings = sublime.load_settings("SublimeExterminal.sublime-settings") cmd = settings.get('start_exterminal', '') os.popen(cmd) ## Instruction: Fix dangling default in kwargs 'shell_cmd' ## Code After: import sublime, sublime_plugin import os def wrapped_exec(self, *args, **kwargs): settings = sublime.load_settings("SublimeExterminal.sublime-settings") if settings.get('enabled') and kwargs.get('use_exterminal', True): wrapper = settings.get('exec_wrapper') try: shell_cmd = kwargs.get('shell_cmd') shell_cmd = wrapper % shell_cmd.replace('"','\\"') kwargs['shell_cmd'] = shell_cmd except KeyError: pass try: cmd = ' '.join(kwargs.get('cmd')) kwargs['shell_cmd'] = wrapper % cmd.replace('"','\\"') except KeyError: pass return self.run_cached_by_exterminal(*args, **kwargs) def plugin_loaded(): exec_cls = [cls for cls in sublime_plugin.window_command_classes \ if cls.__qualname__=='ExecCommand'][0] if hasattr(exec_cls(None), 'run_cached_by_exterminal'): exec_cls.run = exec_cls.run_cached_by_exterminal exec_cls.run_cached_by_exterminal = None exec_cls.run_cached_by_exterminal = exec_cls.run exec_cls.run = wrapped_exec class StartExterminalCommand(sublime_plugin.WindowCommand): def run(self, *args): settings = sublime.load_settings("SublimeExterminal.sublime-settings") cmd = settings.get('start_exterminal', '') os.popen(cmd)
// ... existing code ... wrapper = settings.get('exec_wrapper') try: shell_cmd = kwargs.get('shell_cmd') shell_cmd = wrapper % shell_cmd.replace('"','\\"') kwargs['shell_cmd'] = shell_cmd except KeyError: pass // ... modified code ... cmd = ' '.join(kwargs.get('cmd')) kwargs['shell_cmd'] = wrapper % cmd.replace('"','\\"') except KeyError: pass return self.run_cached_by_exterminal(*args, **kwargs) // ... rest of the code ...
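The dangling default matters because dict.get never raises KeyError; only direct indexing does. With the '' default, the first branch always built a wrapped empty command whenever shell_cmd was absent, and the surrounding except KeyError could never fire. The relevant Python behaviour in isolation:

```python
kwargs = {"cmd": ["make", "test"]}

print(repr(kwargs.get("shell_cmd", "")))   # '' -> the old code happily wrapped an empty string
print(repr(kwargs.get("shell_cmd")))       # None -> the fixed lookup signals "not provided"

try:
    kwargs["shell_cmd"]                    # only this spelling raises KeyError
except KeyError:
    print("KeyError raised")
```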
14a085f787f5fe80a0737d97515b71adaf05d1cd
checker/checker/contest.py
checker/checker/contest.py
from checker.abstract import AbstractChecker

import base64
import sys
import codecs


class ContestChecker(AbstractChecker):
    def __init__(self, tick, team, service, ip):
        AbstractChecker.__init__(self, tick, team, service, ip)

    def _rpc(self, function, *args):
        sys.stdout.write("%s %s\n" % (function, " ".join(args)))
        sys.stdout.flush()
        return sys.stdin.readline().strip()

    def get_flag(self, tick, payload=None):
        if payload is None:
            return self._rpc("FLAG", str(tick))
        else:
            payload = codecs.encode(payload, 'hex').decode('latin-1')
            return self._rpc("FLAG", str(tick), payload)

    def store_blob(self, ident, blob):
        data = base64.b64encode(blob)
        return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))

    def retrieve_blob(self, ident):
        data = self._rpc("RETRIEVE", ident)
        return base64.b64decode(data)
from checker.abstract import AbstractChecker

import base64
import sys
import codecs


class ContestChecker(AbstractChecker):
    def __init__(self, tick, team, service, ip):
        AbstractChecker.__init__(self, tick, team, service, ip)

    def _rpc(self, function, *args):
        sys.stdout.write("%s %s\n" % (function, " ".join(args)))
        sys.stdout.flush()
        return sys.stdin.readline().strip()

    def get_flag(self, tick, payload=None):
        if payload is None:
            return self._rpc("FLAG", str(tick))
        else:
            payload = codecs.encode(payload, 'hex').decode('latin-1')
            return self._rpc("FLAG", str(tick), payload)

    def store_blob(self, ident, blob):
        data = base64.b64encode(blob)
        return self._rpc("STORE", ident, data.decode('latin-1'))

    def retrieve_blob(self, ident):
        data = self._rpc("RETRIEVE", ident)
        return base64.b64decode(data)
Fix double-encoding of binary blobs
Fix double-encoding of binary blobs
Python
isc
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
python
## Code Before: from checker.abstract import AbstractChecker import base64 import sys import codecs class ContestChecker(AbstractChecker): def __init__(self, tick, team, service, ip): AbstractChecker.__init__(self, tick, team, service, ip) def _rpc(self, function, *args): sys.stdout.write("%s %s\n" % (function, " ".join(args))) sys.stdout.flush() return sys.stdin.readline().strip() def get_flag(self, tick, payload=None): if payload is None: return self._rpc("FLAG", str(tick)) else: payload = codecs.encode(payload, 'hex').decode('latin-1') return self._rpc("FLAG", str(tick), payload) def store_blob(self, ident, blob): data = base64.b64encode(blob) return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1')) def retrieve_blob(self, ident): data = self._rpc("RETRIEVE", ident) return base64.b64decode(data) ## Instruction: Fix double-encoding of binary blobs ## Code After: from checker.abstract import AbstractChecker import base64 import sys import codecs class ContestChecker(AbstractChecker): def __init__(self, tick, team, service, ip): AbstractChecker.__init__(self, tick, team, service, ip) def _rpc(self, function, *args): sys.stdout.write("%s %s\n" % (function, " ".join(args))) sys.stdout.flush() return sys.stdin.readline().strip() def get_flag(self, tick, payload=None): if payload is None: return self._rpc("FLAG", str(tick)) else: payload = codecs.encode(payload, 'hex').decode('latin-1') return self._rpc("FLAG", str(tick), payload) def store_blob(self, ident, blob): data = base64.b64encode(blob) return self._rpc("STORE", ident, data.decode('latin-1')) def retrieve_blob(self, ident): data = self._rpc("RETRIEVE", ident) return base64.b64decode(data)
... def store_blob(self, ident, blob): data = base64.b64encode(blob) return self._rpc("STORE", ident, data.decode('latin-1')) def retrieve_blob(self, ident): data = self._rpc("RETRIEVE", ident) ...
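The bug fixed above is a classic double encoding: data was already the base64 form of blob, so encoding it a second time meant the single b64decode in retrieve_blob handed back base64 text rather than the original bytes. A short demonstration:

```python
import base64

blob = b"\x00secret\xff"
once = base64.b64encode(blob)
twice = base64.b64encode(once)           # what the old store_blob transmitted

print(base64.b64decode(once) == blob)    # True  - one encode, one decode round-trips
print(base64.b64decode(twice) == blob)   # False - one decode only peels the outer layer
```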
dcaa940db179cf346f9a0e8f98988d17a3cb2719
core/src/main/kotlin/Kotlin/KotlinAsJavaDescriptorSignatureProvider.kt
core/src/main/kotlin/Kotlin/KotlinAsJavaDescriptorSignatureProvider.kt
package org.jetbrains.dokka.Kotlin

import org.jetbrains.dokka.Model.DescriptorSignatureProvider
import org.jetbrains.dokka.getSignature
import org.jetbrains.dokka.sourcePsi
import org.jetbrains.kotlin.asJava.toLightElements
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
import org.jetbrains.kotlin.psi.KtElement

class KotlinAsJavaDescriptorSignatureProvider : DescriptorSignatureProvider {
    override fun signature(forDesc: DeclarationDescriptor): String {
        val sourcePsi = forDesc.sourcePsi()
        val javaLikePsi = if (sourcePsi is KtElement) {
            sourcePsi.toLightElements().firstOrNull()
        } else {
            sourcePsi
        }
        return getSignature(javaLikePsi) ?:
                throw UnsupportedOperationException("Don't know how to calculate signature for $forDesc")
    }
}
package org.jetbrains.dokka.Kotlin

import org.jetbrains.dokka.Model.DescriptorSignatureProvider
import org.jetbrains.dokka.getSignature
import org.jetbrains.dokka.sourcePsi
import org.jetbrains.kotlin.asJava.toLightElements
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
import org.jetbrains.kotlin.psi.KtElement

class KotlinAsJavaDescriptorSignatureProvider : DescriptorSignatureProvider {
    override fun signature(forDesc: DeclarationDescriptor): String {
        val sourcePsi = forDesc.sourcePsi()
        val javaLikePsi = if (sourcePsi is KtElement) {
            sourcePsi.toLightElements().firstOrNull()
        } else {
            sourcePsi
        }
        return getSignature(javaLikePsi) ?: "not implemented for $forDesc with psi: $sourcePsi"
    }
}
Fix crash in "as Java" mode, just show ugly warning
Fix crash in "as Java" mode, just show ugly warning
Kotlin
apache-2.0
Kotlin/dokka,google/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,google/dokka,Kotlin/dokka
kotlin
## Code Before: package org.jetbrains.dokka.Kotlin import org.jetbrains.dokka.Model.DescriptorSignatureProvider import org.jetbrains.dokka.getSignature import org.jetbrains.dokka.sourcePsi import org.jetbrains.kotlin.asJava.toLightElements import org.jetbrains.kotlin.descriptors.DeclarationDescriptor import org.jetbrains.kotlin.psi.KtElement class KotlinAsJavaDescriptorSignatureProvider : DescriptorSignatureProvider { override fun signature(forDesc: DeclarationDescriptor): String { val sourcePsi = forDesc.sourcePsi() val javaLikePsi = if (sourcePsi is KtElement) { sourcePsi.toLightElements().firstOrNull() } else { sourcePsi } return getSignature(javaLikePsi) ?: throw UnsupportedOperationException("Don't know how to calculate signature for $forDesc") } } ## Instruction: Fix crash in as Java mode, just show ugly warning ## Code After: package org.jetbrains.dokka.Kotlin import org.jetbrains.dokka.Model.DescriptorSignatureProvider import org.jetbrains.dokka.getSignature import org.jetbrains.dokka.sourcePsi import org.jetbrains.kotlin.asJava.toLightElements import org.jetbrains.kotlin.descriptors.DeclarationDescriptor import org.jetbrains.kotlin.psi.KtElement class KotlinAsJavaDescriptorSignatureProvider : DescriptorSignatureProvider { override fun signature(forDesc: DeclarationDescriptor): String { val sourcePsi = forDesc.sourcePsi() val javaLikePsi = if (sourcePsi is KtElement) { sourcePsi.toLightElements().firstOrNull() } else { sourcePsi } return getSignature(javaLikePsi) ?: "not implemented for $forDesc with psi: $sourcePsi" } }
... } return getSignature(javaLikePsi) ?: "not implemented for $forDesc with psi: $sourcePsi" } } ...
86e52da3cfe7e230ac935b7aa35dcab4b7b23402
web/control/views.py
web/control/views.py
import json

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

from vehicles.models import Vehicle
import control.tasks


#@api_view(['POST'])
@csrf_exempt
def handle_control(request, vehicle_vin='-1'):
    print 'vehicle: ', vehicle_vin
    try:
        vehicle = Vehicle.objects.filter(veh_vin=vehicle_vin)[0]
        if request.method == 'POST':
            received_json_data = json.loads(request.body)
            print 'received json data', received_json_data
            try:
                command = received_json_data['command']
                print 'command: ', command
                try:
                    control.tasks.handle_control(vehicle, command)
                    response_data = str(received_json_data)
                    return HttpResponse(response_data, content_type="application/json")
                except Exception as e:
                    print str(e)
                    return HttpResponse('Send control command failed', content_type="plain/text")
            except:
                return HttpResponse('Invalid command format', content_type="plain/text")
    except:
        return HttpResponse('No valid vehicle found.', content_type="plain/text")
import json

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

from vehicles.models import Vehicle
import control.tasks


#@api_view(['POST'])
@csrf_exempt
def handle_control(request, vehicle_vin='-1'):
    print 'vehicle: ', vehicle_vin
    try:
        vehicle = Vehicle.objects.filter(veh_vin=vehicle_vin)[0]
        if request.method == 'POST':
            try:
                received_json_data = json.loads(request.body)
                print 'received json data', received_json_data
                command = received_json_data['command']
                print 'command: ', command
                try:
                    control.tasks.handle_control(vehicle, command)
                    response_data = str(received_json_data)
                    return HttpResponse(response_data, content_type="application/json")
                except Exception as e:
                    print str(e)
                    return HttpResponse('Send control command failed', content_type="plain/text")
            except:
                return HttpResponse('Invalid control message format', content_type="plain/text")
        else:
            return HttpResponse('POST action is expected', content_type="plain/text")
    except:
        return HttpResponse('No valid vehicle found.', content_type="plain/text")
Improve the error handling and error response message.
Improve the error handling and error response message.
Python
mpl-2.0
klooer/rvi_backend,klooer/rvi_backend,klooer/rvi_backend,klooer/rvi_backend
python
## Code Before: import json from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from vehicles.models import Vehicle import control.tasks #@api_view(['POST']) @csrf_exempt def handle_control(request, vehicle_vin='-1'): print 'vehicle: ', vehicle_vin try: vehicle = Vehicle.objects.filter(veh_vin=vehicle_vin)[0] if request.method == 'POST': received_json_data = json.loads(request.body) print 'received json data', received_json_data try: command = received_json_data['command'] print 'command: ', command try: control.tasks.handle_control(vehicle, command) response_data = str(received_json_data) return HttpResponse(response_data, content_type="application/json") except Exception as e: print str(e) return HttpResponse('Send control command failed', content_type="plain/text") except: return HttpResponse('Invalid command format', content_type="plain/text") except: return HttpResponse('No valid vehicle found.', content_type="plain/text") ## Instruction: Improve the error handling and error response message. ## Code After: import json from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from vehicles.models import Vehicle import control.tasks #@api_view(['POST']) @csrf_exempt def handle_control(request, vehicle_vin='-1'): print 'vehicle: ', vehicle_vin try: vehicle = Vehicle.objects.filter(veh_vin=vehicle_vin)[0] if request.method == 'POST': try: received_json_data = json.loads(request.body) print 'received json data', received_json_data command = received_json_data['command'] print 'command: ', command try: control.tasks.handle_control(vehicle, command) response_data = str(received_json_data) return HttpResponse(response_data, content_type="application/json") except Exception as e: print str(e) return HttpResponse('Send control command failed', content_type="plain/text") except: return HttpResponse('Invalid control message format', content_type="plain/text") else: return HttpResponse('POST action is expected', content_type="plain/text") except: return HttpResponse('No valid vehicle found.', content_type="plain/text")
// ... existing code ... vehicle = Vehicle.objects.filter(veh_vin=vehicle_vin)[0] if request.method == 'POST': try: received_json_data = json.loads(request.body) print 'received json data', received_json_data command = received_json_data['command'] print 'command: ', command try: // ... modified code ... print str(e) return HttpResponse('Send control command failed', content_type="plain/text") except: return HttpResponse('Invalid control message format', content_type="plain/text") else: return HttpResponse('POST action is expected', content_type="plain/text") except: return HttpResponse('No valid vehicle found.', content_type="plain/text") // ... rest of the code ...
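Moving json.loads inside the try block (together with the new else branch) means a malformed request body now yields the 'invalid control message format' response instead of an unhandled exception. The scoping point in isolation, with hypothetical helper names:

```python
import json

def parse_command(body):
    try:
        data = json.loads(body)      # malformed JSON is caught by the same handler
        return data["command"]
    except (ValueError, KeyError):
        return None

print(parse_command('{"command": "unlock"}'))   # unlock
print(parse_command("not json at all"))         # None
```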
69b0e1c60eafff596ebb494a7e79a22c6bea374b
polling_stations/apps/data_collection/management/commands/import_hart.py
polling_stations/apps/data_collection/management/commands/import_hart.py
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter


class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E07000089'
    addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter


class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E07000089'
    addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'

    def station_record_to_dict(self, record):
        if record.polling_place_id == '1914':
            record = record._replace(polling_place_easting = '479224')
            record = record._replace(polling_place_northing = '154016')

        return super().station_record_to_dict(record)
Fix dodgy point in Hart
Fix dodgy point in Hart
Python
bsd-3-clause
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
python
## Code Before: from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E07000089' addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV' stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV' elections = ['parl.2017-06-08'] csv_delimiter = '\t' ## Instruction: Fix dodgy point in Hart ## Code After: from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E07000089' addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV' stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV' elections = ['parl.2017-06-08'] csv_delimiter = '\t' def station_record_to_dict(self, record): if record.polling_place_id == '1914': record = record._replace(polling_place_easting = '479224') record = record._replace(polling_place_northing = '154016') return super().station_record_to_dict(record)
// ... existing code ... stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV' elections = ['parl.2017-06-08'] csv_delimiter = '\t' def station_record_to_dict(self, record): if record.polling_place_id == '1914': record = record._replace(polling_place_easting = '479224') record = record._replace(polling_place_northing = '154016') return super().station_record_to_dict(record) // ... rest of the code ...
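The override added above leans on _replace, which namedtuple-style records expose for building a modified copy; that makes it easy to patch one known-bad row without touching the importer itself. A toy version with a hypothetical record type:

```python
from collections import namedtuple

# Hypothetical stand-in for the importer's station record.
Station = namedtuple("Station", "polling_place_id easting northing")

record = Station("1914", "0", "0")
if record.polling_place_id == "1914":
    record = record._replace(easting="479224", northing="154016")

print(record)   # Station(polling_place_id='1914', easting='479224', northing='154016')
```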
94d47cfc6db684beda275f8658660a3bd92b319d
src/syft/grid/client/request_api/user_api.py
src/syft/grid/client/request_api/user_api.py
from typing import Any
from typing import Dict

# third party
from pandas import DataFrame

# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage

from .request_api import GridRequestAPI


class UserRequestAPI(GridRequestAPI):
    response_key = "user"

    def __init__(self, send):
        super().__init__(
            create_msg=CreateUserMessage,
            get_msg=GetUserMessage,
            get_all_msg=GetUsersMessage,
            update_msg=UpdateUserMessage,
            delete_msg=DeleteUserMessage,
            send=send,
            response_key=UserRequestAPI.response_key,
        )

    def __getitem__(self, key):
        return self.get(user_id=key)

    def __delitem__(self, key):
        self.delete(user_id=key)
from typing import Any from typing import Callable # syft relative from ...messages.user_messages import CreateUserMessage from ...messages.user_messages import DeleteUserMessage from ...messages.user_messages import GetUserMessage from ...messages.user_messages import GetUsersMessage from ...messages.user_messages import UpdateUserMessage from .request_api import GridRequestAPI class UserRequestAPI(GridRequestAPI): response_key = "user" def __init__(self, send: Callable): super().__init__( create_msg=CreateUserMessage, get_msg=GetUserMessage, get_all_msg=GetUsersMessage, update_msg=UpdateUserMessage, delete_msg=DeleteUserMessage, send=send, response_key=UserRequestAPI.response_key, ) def __getitem__(self, key: int) -> Any: return self.get(user_id=key) def __delitem__(self, key: int) -> None: self.delete(user_id=key)
Update User API - ADD type hints - Remove unused imports
Update User API - ADD type hints - Remove unused imports
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
python
## Code Before: from typing import Any from typing import Dict # third party from pandas import DataFrame # syft relative from ...messages.user_messages import CreateUserMessage from ...messages.user_messages import DeleteUserMessage from ...messages.user_messages import GetUserMessage from ...messages.user_messages import GetUsersMessage from ...messages.user_messages import UpdateUserMessage from .request_api import GridRequestAPI class UserRequestAPI(GridRequestAPI): response_key = "user" def __init__(self, send): super().__init__( create_msg=CreateUserMessage, get_msg=GetUserMessage, get_all_msg=GetUsersMessage, update_msg=UpdateUserMessage, delete_msg=DeleteUserMessage, send=send, response_key=UserRequestAPI.response_key, ) def __getitem__(self, key): return self.get(user_id=key) def __delitem__(self, key): self.delete(user_id=key) ## Instruction: Update User API - ADD type hints - Remove unused imports ## Code After: from typing import Any from typing import Callable # syft relative from ...messages.user_messages import CreateUserMessage from ...messages.user_messages import DeleteUserMessage from ...messages.user_messages import GetUserMessage from ...messages.user_messages import GetUsersMessage from ...messages.user_messages import UpdateUserMessage from .request_api import GridRequestAPI class UserRequestAPI(GridRequestAPI): response_key = "user" def __init__(self, send: Callable): super().__init__( create_msg=CreateUserMessage, get_msg=GetUserMessage, get_all_msg=GetUsersMessage, update_msg=UpdateUserMessage, delete_msg=DeleteUserMessage, send=send, response_key=UserRequestAPI.response_key, ) def __getitem__(self, key: int) -> Any: return self.get(user_id=key) def __delitem__(self, key: int) -> None: self.delete(user_id=key)
... from typing import Any from typing import Callable # syft relative from ...messages.user_messages import CreateUserMessage ... class UserRequestAPI(GridRequestAPI): response_key = "user" def __init__(self, send: Callable): super().__init__( create_msg=CreateUserMessage, get_msg=GetUserMessage, ... response_key=UserRequestAPI.response_key, ) def __getitem__(self, key: int) -> Any: return self.get(user_id=key) def __delitem__(self, key: int) -> None: self.delete(user_id=key) ...
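The syft change above is purely additive typing: `send` is annotated as `Callable`, the dunder methods take `int` keys, and unused `Dict`/`DataFrame` imports are dropped. The sketch below shows the same dict-style wrapper idiom with those annotations in isolation; `UserAPI` and `fake_send` are invented for illustration and are not part of syft's actual API.

```python
from typing import Any, Callable, Dict


class UserAPI:
    """Illustrative dict-style wrapper around a message-sending callable."""

    def __init__(self, send: Callable[..., Dict[str, Any]]):
        self._send = send

    def __getitem__(self, key: int) -> Any:
        # Mirrors UserRequestAPI.__getitem__: fetch one user by id.
        return self._send(action="get", user_id=key)

    def __delitem__(self, key: int) -> None:
        # Mirrors UserRequestAPI.__delitem__: delete one user by id.
        self._send(action="delete", user_id=key)


if __name__ == "__main__":
    def fake_send(**kwargs: Any) -> Dict[str, Any]:
        return kwargs  # echo the request, for demonstration only

    api = UserAPI(fake_send)
    print(api[42])   # {'action': 'get', 'user_id': 42}
    del api[42]      # sends the delete request; returns None
```

Annotating `__getitem__`/`__delitem__` this way lets a type checker verify callers use integer ids while leaving the return payload open as `Any`.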
9305caf0bf2479b098703c8c7fb3b139f95576ec
vectorTiling.py
vectorTiling.py
import json import os from urlparse import urlparse import zipfile import click import adapters from filters import BasicFilterer import utils import subprocess @click.command() @click.argument('file', type=click.Path(exists=True), required=True) def vectorTiling(file): """ Function that creates vector tiles INPUT: geojson file generated by process.py OUTPUT: mbtiles file containing vector tiles """ # Variables to change in production path = '/Users/athissen/Documents/' min_zoom = 0 max_zoom = 14 paths_string = '' with open(file, 'rb') as f: geojson = json.load(f) features = geojson['features'] for item in features: paths_string += path + item['properties']['path'] + ' ' command = 'tippecanoe -f -o ' + 'result.mbtiles ' + paths_string + ' -z {} -Z {}'.format(max_zoom, min_zoom) subprocess.call(command,shell=True) if __name__ == '__main__': vectorTiling()
import json import click import subprocess import utils import os import logging @click.command() @click.argument('sources', type=click.Path(exists=True), required=True) @click.argument('output', type=click.Path(exists=True), required=True) @click.argument('min_zoom', default=5) @click.argument('max_zoom', default=14) def vectorTiling(sources, output, min_zoom, max_zoom): """ Function that creates vector tiles PARAMS: - sources : directory where the geojson file(s) are - output : directory for the generated data """ files = [] for file in utils.get_files(sources): if not os.path.isdir(file): if file.split('.')[1] == 'geojson': files.append(file) logging.info("{} geojson found".format(len(files))) paths_string = '' for file in files: with open(file, 'rb') as f: geojson = json.load(f) features = geojson['features'] for item in features: paths_string += item['properties']['path'] + ' ' command = 'tippecanoe -f -o ' + output + '/result.mbtiles ' + paths_string + ' -z {} -Z {}'.format(max_zoom, min_zoom) subprocess.call(command,shell=True) if __name__ == '__main__': vectorTiling()
Change of arguments, remove unused modules, source is now a directory
Change of arguments, remove unused modules, source is now a directory
Python
mit
OpenBounds/Processing
python
## Code Before: import json import os from urlparse import urlparse import zipfile import click import adapters from filters import BasicFilterer import utils import subprocess @click.command() @click.argument('file', type=click.Path(exists=True), required=True) def vectorTiling(file): """ Function that creates vector tiles INPUT: geojson file generated by process.py OUTPUT: mbtiles file containing vector tiles """ # Variables to change in production path = '/Users/athissen/Documents/' min_zoom = 0 max_zoom = 14 paths_string = '' with open(file, 'rb') as f: geojson = json.load(f) features = geojson['features'] for item in features: paths_string += path + item['properties']['path'] + ' ' command = 'tippecanoe -f -o ' + 'result.mbtiles ' + paths_string + ' -z {} -Z {}'.format(max_zoom, min_zoom) subprocess.call(command,shell=True) if __name__ == '__main__': vectorTiling() ## Instruction: Change of arguments, remove unused modules, source is now a directory ## Code After: import json import click import subprocess import utils import os import logging @click.command() @click.argument('sources', type=click.Path(exists=True), required=True) @click.argument('output', type=click.Path(exists=True), required=True) @click.argument('min_zoom', default=5) @click.argument('max_zoom', default=14) def vectorTiling(sources, output, min_zoom, max_zoom): """ Function that creates vector tiles PARAMS: - sources : directory where the geojson file(s) are - output : directory for the generated data """ files = [] for file in utils.get_files(sources): if not os.path.isdir(file): if file.split('.')[1] == 'geojson': files.append(file) logging.info("{} geojson found".format(len(files))) paths_string = '' for file in files: with open(file, 'rb') as f: geojson = json.load(f) features = geojson['features'] for item in features: paths_string += item['properties']['path'] + ' ' command = 'tippecanoe -f -o ' + output + '/result.mbtiles ' + paths_string + ' -z {} -Z {}'.format(max_zoom, min_zoom) subprocess.call(command,shell=True) if __name__ == '__main__': vectorTiling()
# ... existing code ... import json import click import subprocess import utils import os import logging @click.command() @click.argument('sources', type=click.Path(exists=True), required=True) @click.argument('output', type=click.Path(exists=True), required=True) @click.argument('min_zoom', default=5) @click.argument('max_zoom', default=14) def vectorTiling(sources, output, min_zoom, max_zoom): """ Function that creates vector tiles PARAMS: - sources : directory where the geojson file(s) are - output : directory for the generated data """ files = [] for file in utils.get_files(sources): if not os.path.isdir(file): if file.split('.')[1] == 'geojson': files.append(file) logging.info("{} geojson found".format(len(files))) paths_string = '' for file in files: with open(file, 'rb') as f: geojson = json.load(f) features = geojson['features'] for item in features: paths_string += item['properties']['path'] + ' ' command = 'tippecanoe -f -o ' + output + '/result.mbtiles ' + paths_string + ' -z {} -Z {}'.format(max_zoom, min_zoom) subprocess.call(command,shell=True) if __name__ == '__main__': # ... rest of the code ...
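The vectorTiling rewrite above scans a source directory for `.geojson` files, concatenates the paths referenced by their features, and shells out to tippecanoe with the chosen zoom range. The sketch below reproduces just the directory scan and command construction in a self-contained way: it uses `pathlib.Path.rglob` instead of the repo's `utils.get_files` helper (an assumption, since that helper is not shown) and only prints the tippecanoe invocation rather than executing it, because tippecanoe may not be installed.

```python
import json
from pathlib import Path


def build_tippecanoe_command(sources: str, output: str,
                             min_zoom: int = 5, max_zoom: int = 14) -> str:
    """Collect feature paths from every .geojson under `sources`."""
    paths = []
    for geojson_file in sorted(Path(sources).rglob("*.geojson")):
        with open(geojson_file, "rb") as f:
            geojson = json.load(f)
        for feature in geojson.get("features", []):
            paths.append(feature["properties"]["path"])

    # Building an argument list (rather than one interpolated shell string)
    # would also let this be passed to subprocess.run() without shell=True.
    args = ["tippecanoe", "-f", "-o", f"{output}/result.mbtiles",
            *paths, "-z", str(max_zoom), "-Z", str(min_zoom)]
    return " ".join(args)


if __name__ == "__main__":
    # With no .geojson files present this just prints the bare command.
    print(build_tippecanoe_command(".", "output"))
```

Note that the committed code keys the extension check on `file.split('.')[1]`, which assumes exactly one dot in the path; the sketch sidesteps that by matching on the `*.geojson` glob instead, which is one way the same filter could be expressed.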