Dataset schema (per-column type and value-length statistics):

column         type            length / values
commit         stringlengths   40-40
old_file       stringlengths   4-234
new_file       stringlengths   4-234
old_contents   stringlengths   10-3.01k
new_contents   stringlengths   19-3.38k
subject        stringlengths   16-736
message        stringlengths   17-2.63k
lang           stringclasses   4 values
license        stringclasses   13 values
repos          stringlengths   5-82.6k
config         stringclasses   4 values
content        stringlengths   134-4.41k
fuzzy_diff     stringlengths   29-3.44k
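In the example rows that follow, the `content` field is visibly assembled from `old_contents`, `subject`, and `new_contents` using a `## Code Before` / `## Instruction` / `## Code After` layout. The sketch below shows how such a prompt could be rebuilt from the other columns of a row. The function name `build_content` and the `example_row` dictionary are hypothetical, and the exact newline placement is an assumption, since line breaks are collapsed in this dump; only the overall section layout is taken from the rows themselves.

```python
def build_content(row: dict) -> str:
    """Assemble a `content`-style prompt from a row's other columns.

    The "## Code Before / ## Instruction / ## Code After" layout mirrors
    the rows shown below; the precise whitespace is an assumption because
    newlines are collapsed in this dump.
    """
    return (
        "## Code Before:\n"
        f"{row['old_contents']}\n"
        "## Instruction: "
        f"{row['subject']}\n"
        "## Code After:\n"
        f"{row['new_contents']}\n"
    )


# Hypothetical usage with a truncated row (real rows hold full file contents):
example_row = {
    "old_contents": "import configparser\n...",
    "subject": "Switch command runner to using db",
    "new_contents": "import pymysql\nimport configparser\n...",
}
print(build_content(example_row))
```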
5b03b6a7f064fccef22c9becba574abafe64f29b
app/src/main/java/com/platypii/baseline/views/charts/TimeChartTouchable.java
app/src/main/java/com/platypii/baseline/views/charts/TimeChartTouchable.java
package com.platypii.baseline.views.charts; import com.platypii.baseline.events.ChartFocusEvent; import com.platypii.baseline.measurements.MLocation; import android.content.Context; import android.util.AttributeSet; import android.view.MotionEvent; import java.util.Collections; import org.greenrobot.eventbus.EventBus; public class TimeChartTouchable extends TimeChart { public TimeChartTouchable(Context context, AttributeSet attrs) { super(context, attrs); } @Override public boolean onTouchEvent(MotionEvent event) { super.onTouchEvent(event); if (event.getAction() == MotionEvent.ACTION_MOVE) { final long millis = (long) plot.getXinverse(0, event.getX()); // Find nearest data point final MLocation closest = findClosest(millis); // Emit chart focus event EventBus.getDefault().post(new ChartFocusEvent(closest)); } return true; // if the event was handled } // Avoid creating new object just to binary search private final MLocation touchLocation = new MLocation(); /** * Performs a binary search for the nearest data point */ private MLocation findClosest(long millis) { touchLocation.millis = millis; int closest_index = Collections.binarySearch(trackData, touchLocation); if (closest_index < 0) closest_index = -closest_index; if (closest_index == trackData.size()) closest_index--; return trackData.get(closest_index); } }
package com.platypii.baseline.views.charts; import com.platypii.baseline.events.ChartFocusEvent; import com.platypii.baseline.measurements.MLocation; import android.content.Context; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.MotionEvent; import java.util.Collections; import org.greenrobot.eventbus.EventBus; public class TimeChartTouchable extends TimeChart { public TimeChartTouchable(Context context, AttributeSet attrs) { super(context, attrs); } @Override public boolean onTouchEvent(MotionEvent event) { super.onTouchEvent(event); if (event.getAction() == MotionEvent.ACTION_MOVE) { final long millis = (long) plot.getXinverse(0, event.getX()); // Find nearest data point final MLocation closest = findClosest(millis); // Emit chart focus event EventBus.getDefault().post(new ChartFocusEvent(closest)); } return true; // if the event was handled } // Avoid creating new object just to binary search private final MLocation touchLocation = new MLocation(); /** * Performs a binary search for the nearest data point */ @Nullable private MLocation findClosest(long millis) { if (trackData != null && !trackData.isEmpty()) { touchLocation.millis = millis; int closest_index = Collections.binarySearch(trackData, touchLocation); if (closest_index < 0) closest_index = -closest_index; if (closest_index == trackData.size()) closest_index--; return trackData.get(closest_index); } else { return null; } } }
Fix null pointer in time chart
Fix null pointer in time chart
Java
mit
platypii/BASElineFlightComputer,platypii/BASElineFlightComputer
java
## Code Before: package com.platypii.baseline.views.charts; import com.platypii.baseline.events.ChartFocusEvent; import com.platypii.baseline.measurements.MLocation; import android.content.Context; import android.util.AttributeSet; import android.view.MotionEvent; import java.util.Collections; import org.greenrobot.eventbus.EventBus; public class TimeChartTouchable extends TimeChart { public TimeChartTouchable(Context context, AttributeSet attrs) { super(context, attrs); } @Override public boolean onTouchEvent(MotionEvent event) { super.onTouchEvent(event); if (event.getAction() == MotionEvent.ACTION_MOVE) { final long millis = (long) plot.getXinverse(0, event.getX()); // Find nearest data point final MLocation closest = findClosest(millis); // Emit chart focus event EventBus.getDefault().post(new ChartFocusEvent(closest)); } return true; // if the event was handled } // Avoid creating new object just to binary search private final MLocation touchLocation = new MLocation(); /** * Performs a binary search for the nearest data point */ private MLocation findClosest(long millis) { touchLocation.millis = millis; int closest_index = Collections.binarySearch(trackData, touchLocation); if (closest_index < 0) closest_index = -closest_index; if (closest_index == trackData.size()) closest_index--; return trackData.get(closest_index); } } ## Instruction: Fix null pointer in time chart ## Code After: package com.platypii.baseline.views.charts; import com.platypii.baseline.events.ChartFocusEvent; import com.platypii.baseline.measurements.MLocation; import android.content.Context; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.MotionEvent; import java.util.Collections; import org.greenrobot.eventbus.EventBus; public class TimeChartTouchable extends TimeChart { public TimeChartTouchable(Context context, AttributeSet attrs) { super(context, attrs); } @Override public boolean onTouchEvent(MotionEvent event) { super.onTouchEvent(event); if (event.getAction() == MotionEvent.ACTION_MOVE) { final long millis = (long) plot.getXinverse(0, event.getX()); // Find nearest data point final MLocation closest = findClosest(millis); // Emit chart focus event EventBus.getDefault().post(new ChartFocusEvent(closest)); } return true; // if the event was handled } // Avoid creating new object just to binary search private final MLocation touchLocation = new MLocation(); /** * Performs a binary search for the nearest data point */ @Nullable private MLocation findClosest(long millis) { if (trackData != null && !trackData.isEmpty()) { touchLocation.millis = millis; int closest_index = Collections.binarySearch(trackData, touchLocation); if (closest_index < 0) closest_index = -closest_index; if (closest_index == trackData.size()) closest_index--; return trackData.get(closest_index); } else { return null; } } }
... import com.platypii.baseline.events.ChartFocusEvent; import com.platypii.baseline.measurements.MLocation; import android.content.Context; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.MotionEvent; import java.util.Collections; ... /** * Performs a binary search for the nearest data point */ @Nullable private MLocation findClosest(long millis) { if (trackData != null && !trackData.isEmpty()) { touchLocation.millis = millis; int closest_index = Collections.binarySearch(trackData, touchLocation); if (closest_index < 0) closest_index = -closest_index; if (closest_index == trackData.size()) closest_index--; return trackData.get(closest_index); } else { return null; } } } ...
599d093c766938f631f2142249d86b32b851ef82
src/main/java/org/jenkinsci/plugins/koji/XMLRPCTest.java
src/main/java/org/jenkinsci/plugins/koji/XMLRPCTest.java
package org.jenkinsci.plugins.koji; import java.util.Map; public class XMLRPCTest { private KojiClient koji; public static void main(String[] args) { String kojiInstanceURL = "http://koji.fedoraproject.org/kojihub"; XMLRPCTest kojiTest = new XMLRPCTest(kojiInstanceURL); kojiTest.test(); } public void test() { String hello = koji.sayHello(); System.out.println(hello); Object[] result; result = koji.getLatestBuilds("f21", "kernel"); for (Object object : result) { System.out.println(object); } String build = "kernel-3.15.0-0.rc3.git5.3.fc21"; Map<String, String> buildInfo; buildInfo = koji.getBuildInfo(build); for (Map.Entry<String, String> entry : buildInfo.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); System.out.println(key + ": " + value); } } public XMLRPCTest(String kojiInstanceURL) { this.koji = KojiClient.getKojiClient(kojiInstanceURL); } }
package org.jenkinsci.plugins.koji; import java.util.Map; public class XMLRPCTest { private KojiClient koji; public static void main(String[] args) { String kojiInstanceURL = "http://koji.fedoraproject.org/kojihub"; XMLRPCTest kojiTest = new XMLRPCTest(kojiInstanceURL); kojiTest.executeTests(); } public void executeTests() { testKojiHello(); // koji.getSession(); // // testGetLatestBuilds(); // // testGeBuildInfo(); } private void testKojiHello() { String hello = koji.sayHello(); System.out.println(hello); } private void testGeBuildInfo() { String build = "kernel-3.15.0-0.rc3.git5.3.fc21"; Map<String, String> buildInfo; buildInfo = koji.getBuildInfo(build); for (Map.Entry<String, String> entry : buildInfo.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); System.out.println(key + ": " + value); } } private void testGetLatestBuilds() { Object[] result; result = koji.getLatestBuilds("f21", "kernel"); for (Object object : result) { System.out.println(object); } } public XMLRPCTest(String kojiInstanceURL) { this.koji = KojiClient.getKojiClient(kojiInstanceURL); } }
Refactor the XML-RPC testing class.
Refactor the XML-RPC testing class.
Java
mit
jenkinsci/koji-plugin,jenkinsci/koji-plugin,vtunka/jenkins-koji-plugin
java
## Code Before: package org.jenkinsci.plugins.koji; import java.util.Map; public class XMLRPCTest { private KojiClient koji; public static void main(String[] args) { String kojiInstanceURL = "http://koji.fedoraproject.org/kojihub"; XMLRPCTest kojiTest = new XMLRPCTest(kojiInstanceURL); kojiTest.test(); } public void test() { String hello = koji.sayHello(); System.out.println(hello); Object[] result; result = koji.getLatestBuilds("f21", "kernel"); for (Object object : result) { System.out.println(object); } String build = "kernel-3.15.0-0.rc3.git5.3.fc21"; Map<String, String> buildInfo; buildInfo = koji.getBuildInfo(build); for (Map.Entry<String, String> entry : buildInfo.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); System.out.println(key + ": " + value); } } public XMLRPCTest(String kojiInstanceURL) { this.koji = KojiClient.getKojiClient(kojiInstanceURL); } } ## Instruction: Refactor the XML-RPC testing class. ## Code After: package org.jenkinsci.plugins.koji; import java.util.Map; public class XMLRPCTest { private KojiClient koji; public static void main(String[] args) { String kojiInstanceURL = "http://koji.fedoraproject.org/kojihub"; XMLRPCTest kojiTest = new XMLRPCTest(kojiInstanceURL); kojiTest.executeTests(); } public void executeTests() { testKojiHello(); // koji.getSession(); // // testGetLatestBuilds(); // // testGeBuildInfo(); } private void testKojiHello() { String hello = koji.sayHello(); System.out.println(hello); } private void testGeBuildInfo() { String build = "kernel-3.15.0-0.rc3.git5.3.fc21"; Map<String, String> buildInfo; buildInfo = koji.getBuildInfo(build); for (Map.Entry<String, String> entry : buildInfo.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); System.out.println(key + ": " + value); } } private void testGetLatestBuilds() { Object[] result; result = koji.getLatestBuilds("f21", "kernel"); for (Object object : result) { System.out.println(object); } } public XMLRPCTest(String kojiInstanceURL) { this.koji = KojiClient.getKojiClient(kojiInstanceURL); } }
// ... existing code ... String kojiInstanceURL = "http://koji.fedoraproject.org/kojihub"; XMLRPCTest kojiTest = new XMLRPCTest(kojiInstanceURL); kojiTest.executeTests(); } public void executeTests() { testKojiHello(); // koji.getSession(); // // testGetLatestBuilds(); // // testGeBuildInfo(); } private void testKojiHello() { String hello = koji.sayHello(); System.out.println(hello); } private void testGeBuildInfo() { String build = "kernel-3.15.0-0.rc3.git5.3.fc21"; Map<String, String> buildInfo; buildInfo = koji.getBuildInfo(build); // ... modified code ... } } private void testGetLatestBuilds() { Object[] result; result = koji.getLatestBuilds("f21", "kernel"); for (Object object : result) { System.out.println(object); } } public XMLRPCTest(String kojiInstanceURL) { this.koji = KojiClient.getKojiClient(kojiInstanceURL); } // ... rest of the code ...
34c6c66b706816546c68fcf842ece81299765dc4
Primes/src/primes/bramble/SlaveNodeRunner.java
Primes/src/primes/bramble/SlaveNodeRunner.java
package primes.bramble; import java.io.Serializable; import java.util.ArrayList; import primes.PrimeGenerator; import bramble.node.slave.ISlaveNodeRunner; import bramble.node.slave.SlaveNode; /** * This is an example of how a SlaveNode can be run. * * This class is responsible for passing a job from the * API to a class that will accept a job. * * @author Tom * */ public class SlaveNodeRunner implements ISlaveNodeRunner{ private static String IPADDR; @SuppressWarnings("unused") public static void main(String[] args){ if(args.length != 1){ System.out.println("Didn't find an IP on the command line, exiting"); System.exit(1); } else { IPADDR = args[0]; System.out.println("Slave node initiated, my IP is " + IPADDR); } new SlaveNodeRunner(); } public SlaveNodeRunner(){ (new SlaveNode<>(IPADDR, this)).listenForever(); } @Override public void runJob(int jobID, ArrayList<Serializable> initializationData) { //System.out.print("[" + jobID + "] "); PrimeGenerator primeGenerator = new PrimeGenerator(IPADDR, jobID, initializationData); primeGenerator.run(); } }
package primes.bramble; import java.io.Serializable; import java.util.ArrayList; import primes.PrimeGenerator; import bramble.node.slave.ISlaveNodeRunner; import bramble.node.slave.SlaveNode; /** * This is an example of how a SlaveNode can be run. * * This class is responsible for passing a job from the * API to a class that will accept a job. * * @author Tom * */ public class SlaveNodeRunner implements ISlaveNodeRunner{ private final String ipAddress; public static void main(String[] args){ if(args.length != 1){ System.out.println("Didn't find an IP on the command line, exiting"); return; } String ipAddress = args[0]; System.out.println("Slave node initiated, my IP is " + ipAddress); (new SlaveNodeRunner(ipAddress)).initialize(); } public SlaveNodeRunner(String ipAddress){ this.ipAddress = ipAddress; } public void initialize(){ (new SlaveNode<>(this.ipAddress, this)).listenForever(); } @Override public void runJob(int jobID, ArrayList<Serializable> initializationData) { //System.out.print("[" + jobID + "] "); PrimeGenerator primeGenerator = new PrimeGenerator(this.ipAddress, jobID, initializationData); primeGenerator.run(); } }
Refactor static variables out of slave node runner
Refactor static variables out of slave node runner
Java
apache-2.0
Tom-Willemsen/PicoBramble,Tom-Willemsen/PicoBramble,Tom-Willemsen/PicoBramble
java
## Code Before: package primes.bramble; import java.io.Serializable; import java.util.ArrayList; import primes.PrimeGenerator; import bramble.node.slave.ISlaveNodeRunner; import bramble.node.slave.SlaveNode; /** * This is an example of how a SlaveNode can be run. * * This class is responsible for passing a job from the * API to a class that will accept a job. * * @author Tom * */ public class SlaveNodeRunner implements ISlaveNodeRunner{ private static String IPADDR; @SuppressWarnings("unused") public static void main(String[] args){ if(args.length != 1){ System.out.println("Didn't find an IP on the command line, exiting"); System.exit(1); } else { IPADDR = args[0]; System.out.println("Slave node initiated, my IP is " + IPADDR); } new SlaveNodeRunner(); } public SlaveNodeRunner(){ (new SlaveNode<>(IPADDR, this)).listenForever(); } @Override public void runJob(int jobID, ArrayList<Serializable> initializationData) { //System.out.print("[" + jobID + "] "); PrimeGenerator primeGenerator = new PrimeGenerator(IPADDR, jobID, initializationData); primeGenerator.run(); } } ## Instruction: Refactor static variables out of slave node runner ## Code After: package primes.bramble; import java.io.Serializable; import java.util.ArrayList; import primes.PrimeGenerator; import bramble.node.slave.ISlaveNodeRunner; import bramble.node.slave.SlaveNode; /** * This is an example of how a SlaveNode can be run. * * This class is responsible for passing a job from the * API to a class that will accept a job. * * @author Tom * */ public class SlaveNodeRunner implements ISlaveNodeRunner{ private final String ipAddress; public static void main(String[] args){ if(args.length != 1){ System.out.println("Didn't find an IP on the command line, exiting"); return; } String ipAddress = args[0]; System.out.println("Slave node initiated, my IP is " + ipAddress); (new SlaveNodeRunner(ipAddress)).initialize(); } public SlaveNodeRunner(String ipAddress){ this.ipAddress = ipAddress; } public void initialize(){ (new SlaveNode<>(this.ipAddress, this)).listenForever(); } @Override public void runJob(int jobID, ArrayList<Serializable> initializationData) { //System.out.print("[" + jobID + "] "); PrimeGenerator primeGenerator = new PrimeGenerator(this.ipAddress, jobID, initializationData); primeGenerator.run(); } }
... */ public class SlaveNodeRunner implements ISlaveNodeRunner{ private final String ipAddress; public static void main(String[] args){ if(args.length != 1){ System.out.println("Didn't find an IP on the command line, exiting"); return; } String ipAddress = args[0]; System.out.println("Slave node initiated, my IP is " + ipAddress); (new SlaveNodeRunner(ipAddress)).initialize(); } public SlaveNodeRunner(String ipAddress){ this.ipAddress = ipAddress; } public void initialize(){ (new SlaveNode<>(this.ipAddress, this)).listenForever(); } @Override public void runJob(int jobID, ArrayList<Serializable> initializationData) { //System.out.print("[" + jobID + "] "); PrimeGenerator primeGenerator = new PrimeGenerator(this.ipAddress, jobID, initializationData); primeGenerator.run(); } ...
fd909f383ab8a930c8a858144e0566075821f019
tests/test_search.py
tests/test_search.py
from sharepa.search import ShareSearch from sharepa.search import basic_search import elasticsearch_dsl import types def test_basic_search(): results = basic_search.execute() assert results.hits assert results.aggregations def test_no_title_search(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='NOT title:*', analyze_wildcard=True ) results = my_search.execute() for result in results: assert not result.get('title') def test_execute(): my_search = ShareSearch() result = my_search.execute() assert isinstance(result, elasticsearch_dsl.result.Response) def test_count(): count = basic_search.count() assert isinstance(count, int) def test_query(): assert isinstance(basic_search._query(basic_search.to_dict()), dict) def test_scan(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='science AND cows AND milk' ) scan = my_search.scan() scan_list = [item for item in scan] assert isinstance(scan, types.GeneratorType) assert scan_list
from sharepa.search import ShareSearch from sharepa.search import basic_search import vcr import types import elasticsearch_dsl def test_basic_search(): results = basic_search.execute() assert results.hits assert results.aggregations def test_no_title_search(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='NOT title:*', analyze_wildcard=True ) results = my_search.execute() for result in results: assert not result.get('title') @vcr.use_cassette('tests/vcr/simple_execute.yaml') def test_execute(): my_search = ShareSearch() result = my_search.execute() first_result = result.hits[0].to_dict() assert len(result.hits) == 10 assert result.to_dict().keys() == ['hits', '_shards', 'took', 'timed_out', 'time'] assert isinstance(result, elasticsearch_dsl.result.Response) assert first_result['title'] == 'Avian community structure and incidence of human West Nile infection' def test_count(): count = basic_search.count() assert isinstance(count, int) def test_query(): assert isinstance(basic_search._query(basic_search.to_dict()), dict) @vcr.use_cassette('tests/vcr/scan.yaml') def test_scan(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='squared AND circle' ) scan = my_search.scan() scan_list = [item for item in scan] assert len(scan_list) == 3 assert scan_list[0].title == '<p>The ellipsoids in the figure are isolines of constant density of bivariate Gaussian distributions.</p>'
Add vcr to scan test
Add vcr to scan test
Python
mit
fabianvf/sharepa,erinspace/sharepa,samanehsan/sharepa,CenterForOpenScience/sharepa
python
## Code Before: from sharepa.search import ShareSearch from sharepa.search import basic_search import elasticsearch_dsl import types def test_basic_search(): results = basic_search.execute() assert results.hits assert results.aggregations def test_no_title_search(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='NOT title:*', analyze_wildcard=True ) results = my_search.execute() for result in results: assert not result.get('title') def test_execute(): my_search = ShareSearch() result = my_search.execute() assert isinstance(result, elasticsearch_dsl.result.Response) def test_count(): count = basic_search.count() assert isinstance(count, int) def test_query(): assert isinstance(basic_search._query(basic_search.to_dict()), dict) def test_scan(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='science AND cows AND milk' ) scan = my_search.scan() scan_list = [item for item in scan] assert isinstance(scan, types.GeneratorType) assert scan_list ## Instruction: Add vcr to scan test ## Code After: from sharepa.search import ShareSearch from sharepa.search import basic_search import vcr import types import elasticsearch_dsl def test_basic_search(): results = basic_search.execute() assert results.hits assert results.aggregations def test_no_title_search(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='NOT title:*', analyze_wildcard=True ) results = my_search.execute() for result in results: assert not result.get('title') @vcr.use_cassette('tests/vcr/simple_execute.yaml') def test_execute(): my_search = ShareSearch() result = my_search.execute() first_result = result.hits[0].to_dict() assert len(result.hits) == 10 assert result.to_dict().keys() == ['hits', '_shards', 'took', 'timed_out', 'time'] assert isinstance(result, elasticsearch_dsl.result.Response) assert first_result['title'] == 'Avian community structure and incidence of human West Nile infection' def test_count(): count = basic_search.count() assert isinstance(count, int) def test_query(): assert isinstance(basic_search._query(basic_search.to_dict()), dict) @vcr.use_cassette('tests/vcr/scan.yaml') def test_scan(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='squared AND circle' ) scan = my_search.scan() scan_list = [item for item in scan] assert len(scan_list) == 3 assert scan_list[0].title == '<p>The ellipsoids in the figure are isolines of constant density of bivariate Gaussian distributions.</p>'
# ... existing code ... from sharepa.search import ShareSearch from sharepa.search import basic_search import vcr import types import elasticsearch_dsl def test_basic_search(): # ... modified code ... assert not result.get('title') @vcr.use_cassette('tests/vcr/simple_execute.yaml') def test_execute(): my_search = ShareSearch() result = my_search.execute() first_result = result.hits[0].to_dict() assert len(result.hits) == 10 assert result.to_dict().keys() == ['hits', '_shards', 'took', 'timed_out', 'time'] assert isinstance(result, elasticsearch_dsl.result.Response) assert first_result['title'] == 'Avian community structure and incidence of human West Nile infection' def test_count(): ... assert isinstance(basic_search._query(basic_search.to_dict()), dict) @vcr.use_cassette('tests/vcr/scan.yaml') def test_scan(): my_search = ShareSearch() my_search = my_search.query( 'query_string', query='squared AND circle' ) scan = my_search.scan() scan_list = [item for item in scan] assert len(scan_list) == 3 assert scan_list[0].title == '<p>The ellipsoids in the figure are isolines of constant density of bivariate Gaussian distributions.</p>' # ... rest of the code ...
d81dbd7b25cd44f730e979efe03eb6e5e1d87f1b
admin/commandRunner.py
admin/commandRunner.py
import configparser import sys import os parser = configparser.ConfigParser() parser.read("../halite.ini") WORKERS = dict(parser.items("workerIPs")) command = sys.argv[1] print(command) for name in WORKERS: print("########"+name+"########") print(WORKERS[name]) os.system("ssh root@"+WORKERS[name]+" '"+command+"'") print("################\n")
import pymysql import configparser import sys import os import os.path parser = configparser.ConfigParser() parser.read("../halite.ini") DB_CONFIG = parser["database"] keyPath = os.path.join("../", parser["aws"]["keyfilepath"]) db = pymysql.connect(host=DB_CONFIG["hostname"], user=DB_CONFIG['username'], passwd=DB_CONFIG['password'], db=DB_CONFIG['name'], cursorclass=pymysql.cursors.DictCursor) cursor = db.cursor() cursor.execute("select * from Worker") workers = cursor.fetchall() command = sys.argv[1] for worker in workers: print("########"+worker['ipAddress']+"########") os.system("ssh -i \""+keyPath+"\" ubuntu@"+worker['ipAddress']+" '"+command+"'") print("################\n")
Switch command runner to using db
Switch command runner to using db
Python
mit
HaliteChallenge/Halite,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite,lanyudhy/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II
python
## Code Before: import configparser import sys import os parser = configparser.ConfigParser() parser.read("../halite.ini") WORKERS = dict(parser.items("workerIPs")) command = sys.argv[1] print(command) for name in WORKERS: print("########"+name+"########") print(WORKERS[name]) os.system("ssh root@"+WORKERS[name]+" '"+command+"'") print("################\n") ## Instruction: Switch command runner to using db ## Code After: import pymysql import configparser import sys import os import os.path parser = configparser.ConfigParser() parser.read("../halite.ini") DB_CONFIG = parser["database"] keyPath = os.path.join("../", parser["aws"]["keyfilepath"]) db = pymysql.connect(host=DB_CONFIG["hostname"], user=DB_CONFIG['username'], passwd=DB_CONFIG['password'], db=DB_CONFIG['name'], cursorclass=pymysql.cursors.DictCursor) cursor = db.cursor() cursor.execute("select * from Worker") workers = cursor.fetchall() command = sys.argv[1] for worker in workers: print("########"+worker['ipAddress']+"########") os.system("ssh -i \""+keyPath+"\" ubuntu@"+worker['ipAddress']+" '"+command+"'") print("################\n")
# ... existing code ... import pymysql import configparser import sys import os import os.path parser = configparser.ConfigParser() parser.read("../halite.ini") DB_CONFIG = parser["database"] keyPath = os.path.join("../", parser["aws"]["keyfilepath"]) db = pymysql.connect(host=DB_CONFIG["hostname"], user=DB_CONFIG['username'], passwd=DB_CONFIG['password'], db=DB_CONFIG['name'], cursorclass=pymysql.cursors.DictCursor) cursor = db.cursor() cursor.execute("select * from Worker") workers = cursor.fetchall() command = sys.argv[1] for worker in workers: print("########"+worker['ipAddress']+"########") os.system("ssh -i \""+keyPath+"\" ubuntu@"+worker['ipAddress']+" '"+command+"'") print("################\n") # ... rest of the code ...
4a14e0945732e5c67aca01d6bd070d00d1697d9a
jsonpull.h
jsonpull.h
typedef enum json_type { JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL, JSON_COMMA, JSON_COLON, JSON_ITEM, JSON_KEY, JSON_VALUE, } json_type; typedef struct json_object { json_type type; struct json_object *parent; char *string; double number; struct json_object **array; struct json_object **keys; struct json_object **values; int length; int expect; } json_object; struct json_pull { json_object *root; char *error; int (*read)(struct json_pull *); int (*peek)(struct json_pull *); void *source; int line; json_object *container; }; typedef struct json_pull json_pull; typedef void (*json_separator_callback)(json_type type, json_pull *j, void *state); json_pull *json_begin_file(FILE *f); json_pull *json_begin_string(char *s); json_object *json_parse(json_pull *j); json_object *json_parse_with_separators(json_pull *j, json_separator_callback cb, void *state); void json_free(json_object *j); json_object *json_hash_get(json_object *o, char *s);
typedef enum json_type { // These types can be returned by json_parse() JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL, // These and JSON_HASH and JSON_ARRAY can be called back by json_parse_with_separators() JSON_COMMA, JSON_COLON, // These are only used internally as expectations of what comes next JSON_ITEM, JSON_KEY, JSON_VALUE, } json_type; typedef struct json_object { json_type type; struct json_object *parent; char *string; double number; struct json_object **array; struct json_object **keys; struct json_object **values; int length; int expect; } json_object; struct json_pull { json_object *root; char *error; int (*read)(struct json_pull *); int (*peek)(struct json_pull *); void *source; int line; json_object *container; }; typedef struct json_pull json_pull; typedef void (*json_separator_callback)(json_type type, json_pull *j, void *state); json_pull *json_begin_file(FILE *f); json_pull *json_begin_string(char *s); json_object *json_parse(json_pull *j); json_object *json_parse_with_separators(json_pull *j, json_separator_callback cb, void *state); void json_free(json_object *j); json_object *json_hash_get(json_object *o, char *s);
Clarify what types are used where
Clarify what types are used where
C
bsd-2-clause
mapbox/tippecanoe,joykuotw/tippecanoe,landsurveyorsunited/tippecanoe,mapbox/tippecanoe,ericfischer/json-pull,mapbox/tippecanoe,landsurveyorsunited/tippecanoe,mapbox/tippecanoe,joykuotw/tippecanoe
c
## Code Before: typedef enum json_type { JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL, JSON_COMMA, JSON_COLON, JSON_ITEM, JSON_KEY, JSON_VALUE, } json_type; typedef struct json_object { json_type type; struct json_object *parent; char *string; double number; struct json_object **array; struct json_object **keys; struct json_object **values; int length; int expect; } json_object; struct json_pull { json_object *root; char *error; int (*read)(struct json_pull *); int (*peek)(struct json_pull *); void *source; int line; json_object *container; }; typedef struct json_pull json_pull; typedef void (*json_separator_callback)(json_type type, json_pull *j, void *state); json_pull *json_begin_file(FILE *f); json_pull *json_begin_string(char *s); json_object *json_parse(json_pull *j); json_object *json_parse_with_separators(json_pull *j, json_separator_callback cb, void *state); void json_free(json_object *j); json_object *json_hash_get(json_object *o, char *s); ## Instruction: Clarify what types are used where ## Code After: typedef enum json_type { // These types can be returned by json_parse() JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL, // These and JSON_HASH and JSON_ARRAY can be called back by json_parse_with_separators() JSON_COMMA, JSON_COLON, // These are only used internally as expectations of what comes next JSON_ITEM, JSON_KEY, JSON_VALUE, } json_type; typedef struct json_object { json_type type; struct json_object *parent; char *string; double number; struct json_object **array; struct json_object **keys; struct json_object **values; int length; int expect; } json_object; struct json_pull { json_object *root; char *error; int (*read)(struct json_pull *); int (*peek)(struct json_pull *); void *source; int line; json_object *container; }; typedef struct json_pull json_pull; typedef void (*json_separator_callback)(json_type type, json_pull *j, void *state); json_pull *json_begin_file(FILE *f); json_pull *json_begin_string(char *s); json_object *json_parse(json_pull *j); json_object *json_parse_with_separators(json_pull *j, json_separator_callback cb, void *state); void json_free(json_object *j); json_object *json_hash_get(json_object *o, char *s);
// ... existing code ... typedef enum json_type { // These types can be returned by json_parse() JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL, // These and JSON_HASH and JSON_ARRAY can be called back by json_parse_with_separators() JSON_COMMA, JSON_COLON, // These are only used internally as expectations of what comes next JSON_ITEM, JSON_KEY, JSON_VALUE, } json_type; typedef struct json_object { // ... rest of the code ...
ebb3ea0d72835c4acdc38ba241cf8fd4f828c5cd
setup.py
setup.py
from distutils.core import setup, Extension import sys ext_modules = [ Extension('classified._platform', ['src/classified._platform.c'], extra_compile_args=[ '-DPLATFORM_%s' % (sys.platform.upper()), '-Wunused', ] ) ] setup( name = 'classified', version = '0.0.2', author = 'Wijnand Modderman', author_email = '[email protected]', description = 'Classified data scanner', license = 'MIT', keywords = 'classified sensitive pan pci', packages = [ 'classified', 'classified.probe', ], data_files = [ ('/etc/classified', 'etc/classified.conf.sample'), ], scripts = ['bin/classified'], ext_modules = ext_modules, )
from distutils.core import setup, Extension import sys ext_modules = [ Extension('classified._platform', ['src/classified._platform.c'], extra_compile_args=[ '-DPLATFORM_%s' % (sys.platform.upper()), '-Wunused', ] ) ] setup( name = 'classified', version = '0.0.2', author = 'Wijnand Modderman', author_email = '[email protected]', description = 'Classified data scanner', license = 'MIT', keywords = 'classified sensitive pan pci', packages = [ 'classified', 'classified.probe', 'classified.probe.pan', 'classified.probe.password', 'classified.probe.ssl', ], data_files = [ ('/etc/classified', 'etc/classified.conf.sample'), ], scripts = ['bin/classified'], ext_modules = ext_modules, )
Move probes to their own directory
Move probes to their own directory
Python
mit
tehmaze/classified,tehmaze/classified,tehmaze/classified
python
## Code Before: from distutils.core import setup, Extension import sys ext_modules = [ Extension('classified._platform', ['src/classified._platform.c'], extra_compile_args=[ '-DPLATFORM_%s' % (sys.platform.upper()), '-Wunused', ] ) ] setup( name = 'classified', version = '0.0.2', author = 'Wijnand Modderman', author_email = '[email protected]', description = 'Classified data scanner', license = 'MIT', keywords = 'classified sensitive pan pci', packages = [ 'classified', 'classified.probe', ], data_files = [ ('/etc/classified', 'etc/classified.conf.sample'), ], scripts = ['bin/classified'], ext_modules = ext_modules, ) ## Instruction: Move probes to their own directory ## Code After: from distutils.core import setup, Extension import sys ext_modules = [ Extension('classified._platform', ['src/classified._platform.c'], extra_compile_args=[ '-DPLATFORM_%s' % (sys.platform.upper()), '-Wunused', ] ) ] setup( name = 'classified', version = '0.0.2', author = 'Wijnand Modderman', author_email = '[email protected]', description = 'Classified data scanner', license = 'MIT', keywords = 'classified sensitive pan pci', packages = [ 'classified', 'classified.probe', 'classified.probe.pan', 'classified.probe.password', 'classified.probe.ssl', ], data_files = [ ('/etc/classified', 'etc/classified.conf.sample'), ], scripts = ['bin/classified'], ext_modules = ext_modules, )
// ... existing code ... packages = [ 'classified', 'classified.probe', 'classified.probe.pan', 'classified.probe.password', 'classified.probe.ssl', ], data_files = [ ('/etc/classified', 'etc/classified.conf.sample'), // ... rest of the code ...
897cb5dfd39c5fb94f3a0f49f7b66d5080c2e0ab
src/main/java/me/coley/recaf/ui/controls/ResourceSelectionCell.java
src/main/java/me/coley/recaf/ui/controls/ResourceSelectionCell.java
package me.coley.recaf.ui.controls; import javafx.scene.control.*; import javafx.scene.control.cell.ComboBoxListCell; import javafx.scene.layout.HBox; import me.coley.recaf.control.gui.GuiController; import me.coley.recaf.util.UiUtil; import me.coley.recaf.workspace.*; /** * Cell/renderer for displaying {@link JavaResource}s. */ public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> { private final GuiController controller; /** * @param controller * Controller to use. */ public ResourceSelectionCell(GuiController controller) { this.controller = controller; } @Override public void updateItem(JavaResource item, boolean empty) { super.updateItem(item, empty); if(!empty) { HBox g = new HBox(); if(item != null) { String t = item.toString(); // Add icon for resource types g.getChildren().add(new IconView(UiUtil.getResourceIcon(item))); // Indicate which resource is the primary resource if(item == controller.getWorkspace().getPrimary()) { Label lbl = new Label(" [Primary]"); lbl.getStyleClass().add("bold"); g.getChildren().add(lbl); } setText(t); } setGraphic(g); } else { setGraphic(null); setText(null); } } }
package me.coley.recaf.ui.controls; import javafx.scene.control.*; import javafx.scene.control.cell.ComboBoxListCell; import javafx.scene.layout.HBox; import me.coley.recaf.control.gui.GuiController; import me.coley.recaf.util.UiUtil; import me.coley.recaf.workspace.*; /** * Cell/renderer for displaying {@link JavaResource}s. */ public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> { private final GuiController controller; /** * @param controller * Controller to use. */ public ResourceSelectionCell(GuiController controller) { this.controller = controller; } @Override public void updateItem(JavaResource item, boolean empty) { super.updateItem(item, empty); if(!empty) { HBox g = new HBox(); if(item != null) { String t = item.toString(); // Add icon for resource types g.getChildren().add(new IconView(UiUtil.getResourceIcon(item))); // Indicate which resource is the primary resource if(controller.getWorkspace() != null && item == controller.getWorkspace().getPrimary()) { Label lbl = new Label(" [Primary]"); lbl.getStyleClass().add("bold"); g.getChildren().add(lbl); } setText(t); } setGraphic(g); } else { setGraphic(null); setText(null); } } }
Fix attach failing when no workspace is open
Fix attach failing when no workspace is open
Java
mit
Col-E/Recaf,Col-E/Recaf
java
## Code Before: package me.coley.recaf.ui.controls; import javafx.scene.control.*; import javafx.scene.control.cell.ComboBoxListCell; import javafx.scene.layout.HBox; import me.coley.recaf.control.gui.GuiController; import me.coley.recaf.util.UiUtil; import me.coley.recaf.workspace.*; /** * Cell/renderer for displaying {@link JavaResource}s. */ public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> { private final GuiController controller; /** * @param controller * Controller to use. */ public ResourceSelectionCell(GuiController controller) { this.controller = controller; } @Override public void updateItem(JavaResource item, boolean empty) { super.updateItem(item, empty); if(!empty) { HBox g = new HBox(); if(item != null) { String t = item.toString(); // Add icon for resource types g.getChildren().add(new IconView(UiUtil.getResourceIcon(item))); // Indicate which resource is the primary resource if(item == controller.getWorkspace().getPrimary()) { Label lbl = new Label(" [Primary]"); lbl.getStyleClass().add("bold"); g.getChildren().add(lbl); } setText(t); } setGraphic(g); } else { setGraphic(null); setText(null); } } } ## Instruction: Fix attach failing when no workspace is open ## Code After: package me.coley.recaf.ui.controls; import javafx.scene.control.*; import javafx.scene.control.cell.ComboBoxListCell; import javafx.scene.layout.HBox; import me.coley.recaf.control.gui.GuiController; import me.coley.recaf.util.UiUtil; import me.coley.recaf.workspace.*; /** * Cell/renderer for displaying {@link JavaResource}s. */ public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> { private final GuiController controller; /** * @param controller * Controller to use. */ public ResourceSelectionCell(GuiController controller) { this.controller = controller; } @Override public void updateItem(JavaResource item, boolean empty) { super.updateItem(item, empty); if(!empty) { HBox g = new HBox(); if(item != null) { String t = item.toString(); // Add icon for resource types g.getChildren().add(new IconView(UiUtil.getResourceIcon(item))); // Indicate which resource is the primary resource if(controller.getWorkspace() != null && item == controller.getWorkspace().getPrimary()) { Label lbl = new Label(" [Primary]"); lbl.getStyleClass().add("bold"); g.getChildren().add(lbl); } setText(t); } setGraphic(g); } else { setGraphic(null); setText(null); } } }
// ... existing code ... // Add icon for resource types g.getChildren().add(new IconView(UiUtil.getResourceIcon(item))); // Indicate which resource is the primary resource if(controller.getWorkspace() != null && item == controller.getWorkspace().getPrimary()) { Label lbl = new Label(" [Primary]"); lbl.getStyleClass().add("bold"); g.getChildren().add(lbl); // ... rest of the code ...
926d5333c1556850a3eda6025ac8cf471b67c0a3
condor/probes/setup.py
condor/probes/setup.py
from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/etc/sysconfig', ['config/collect_history'])], license = 'Apache 2.0' )
from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' )
Add directory for state files
Add directory for state files
Python
apache-2.0
DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs
python
## Code Before: from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/etc/sysconfig', ['config/collect_history'])], license = 'Apache 2.0' ) ## Instruction: Add directory for state files ## Code After: from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' )
# ... existing code ... scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' ) # ... rest of the code ...
5eefa21699f2dc7b75a919b5899a25ec7ef5c5b7
tests/unit/test_adapter_session.py
tests/unit/test_adapter_session.py
import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2
import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 @pytest.mark.django_db def test_refresh_removes_disabled(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) adapter = adapters.SessionSegmentsAdapter(request) segment_1.status = segment_1.STATUS_DISABLED segment_1.save() adapter.refresh() assert adapter.get_segments() == [segment_2]
Add test for sessionadapter.refresh when segment is disable
Add test for sessionadapter.refresh when segment is disable
Python
mit
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
python
## Code Before: import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 ## Instruction: Add test for sessionadapter.refresh when segment is disable ## Code After: import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 @pytest.mark.django_db def test_refresh_removes_disabled(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) adapter = adapters.SessionSegmentsAdapter(request) segment_1.status = segment_1.STATUS_DISABLED segment_1.save() adapter.refresh() assert adapter.get_segments() == [segment_2]
# ... existing code ... segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 @pytest.mark.django_db def test_refresh_removes_disabled(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) adapter = adapters.SessionSegmentsAdapter(request) segment_1.status = segment_1.STATUS_DISABLED segment_1.save() adapter.refresh() assert adapter.get_segments() == [segment_2] # ... rest of the code ...
73c842af63a09add43c0e33336dd4eb21153fda1
bin/database.py
bin/database.py
import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
Fix tab vs spaces issue
Fix tab vs spaces issue
Python
mit
scitran/api,scitran/api,scitran/core,scitran/core,scitran/core,scitran/core
python
## Code Before: import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 ## Instruction: Fix tab vs spaces issue ## Code After: import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
... import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 ...
171974ab9c069abe14c25ef220f683d4905d1454
socorro/external/rabbitmq/rmq_new_crash_source.py
socorro/external/rabbitmq/rmq_new_crash_source.py
from configman import Namespace, RequiredConfig from configman.converters import class_converter from functools import partial #============================================================================== class RMQNewCrashSource(RequiredConfig): """this class is a refactoring of the iteratior portion of the legacy Socorro processor. It isolates just the part of fetching the ooids of jobs to be processed""" required_config = Namespace() required_config.source.add_option( 'crashstorage_class', doc='the source storage class', default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage', from_string_converter=class_converter ) #-------------------------------------------------------------------------- def __init__(self, config, processor_name, quit_check_callback=None): self.config = config self.crash_store = config.crashstorage_class(config) #-------------------------------------------------------------------------- def close(self): pass #-------------------------------------------------------------------------- def __iter__(self): """an adapter that allows this class can serve as an iterator in a fetch_transform_save app""" for a_crash_id in self.crash_store.new_crashes(): yield ( (a_crash_id,), {'finished_func': partial( self.crash_store.ack_crash, a_crash_id )} ) #-------------------------------------------------------------------------- def __call__(self): return self.__iter__()
from configman import Namespace, RequiredConfig from configman.converters import class_converter from functools import partial #============================================================================== class RMQNewCrashSource(RequiredConfig): """An iterable of crashes from RabbitMQ""" required_config = Namespace() required_config.source.add_option( 'crashstorage_class', doc='the source storage class', default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage', from_string_converter=class_converter ) #-------------------------------------------------------------------------- def __init__(self, config, processor_name, quit_check_callback=None): self.config = config self.crash_store = config.crashstorage_class(config) #-------------------------------------------------------------------------- def close(self): pass #-------------------------------------------------------------------------- def __iter__(self): """Return an iterator over crashes from RabbitMQ. Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg is a crash ID, and the kwargs contain only a callback function which the FTS app will call to send an ack to Rabbit after processing is complete. """ for a_crash_id in self.crash_store.new_crashes(): yield ( (a_crash_id,), {'finished_func': partial( self.crash_store.ack_crash, a_crash_id )} ) #-------------------------------------------------------------------------- def __call__(self): return self.__iter__()
Correct docs on RabbitMQ crash source.
Correct docs on RabbitMQ crash source.
Python
mpl-2.0
linearregression/socorro,linearregression/socorro,Serg09/socorro,Serg09/socorro,linearregression/socorro,m8ttyB/socorro,Serg09/socorro,luser/socorro,twobraids/socorro,lonnen/socorro,bsmedberg/socorro,AdrianGaudebert/socorro,cliqz/socorro,yglazko/socorro,pcabido/socorro,lonnen/socorro,twobraids/socorro,bsmedberg/socorro,adngdb/socorro,bsmedberg/socorro,AdrianGaudebert/socorro,Tayamarn/socorro,cliqz/socorro,Tchanders/socorro,twobraids/socorro,Serg09/socorro,pcabido/socorro,m8ttyB/socorro,linearregression/socorro,KaiRo-at/socorro,spthaolt/socorro,Tayamarn/socorro,luser/socorro,Tchanders/socorro,spthaolt/socorro,KaiRo-at/socorro,KaiRo-at/socorro,mozilla/socorro,mozilla/socorro,Tayamarn/socorro,Tayamarn/socorro,Serg09/socorro,cliqz/socorro,adngdb/socorro,spthaolt/socorro,mozilla/socorro,yglazko/socorro,luser/socorro,Serg09/socorro,yglazko/socorro,AdrianGaudebert/socorro,cliqz/socorro,pcabido/socorro,rhelmer/socorro,Tchanders/socorro,Tayamarn/socorro,AdrianGaudebert/socorro,yglazko/socorro,m8ttyB/socorro,bsmedberg/socorro,lonnen/socorro,adngdb/socorro,m8ttyB/socorro,rhelmer/socorro,Tchanders/socorro,mozilla/socorro,luser/socorro,twobraids/socorro,adngdb/socorro,mozilla/socorro,m8ttyB/socorro,Tchanders/socorro,luser/socorro,AdrianGaudebert/socorro,bsmedberg/socorro,twobraids/socorro,spthaolt/socorro,Tayamarn/socorro,mozilla/socorro,twobraids/socorro,KaiRo-at/socorro,pcabido/socorro,linearregression/socorro,Tchanders/socorro,KaiRo-at/socorro,cliqz/socorro,lonnen/socorro,pcabido/socorro,yglazko/socorro,adngdb/socorro,rhelmer/socorro,pcabido/socorro,linearregression/socorro,AdrianGaudebert/socorro,luser/socorro,spthaolt/socorro,rhelmer/socorro,KaiRo-at/socorro,cliqz/socorro,m8ttyB/socorro,rhelmer/socorro,adngdb/socorro,spthaolt/socorro,rhelmer/socorro,yglazko/socorro
python
## Code Before: from configman import Namespace, RequiredConfig from configman.converters import class_converter from functools import partial #============================================================================== class RMQNewCrashSource(RequiredConfig): """this class is a refactoring of the iteratior portion of the legacy Socorro processor. It isolates just the part of fetching the ooids of jobs to be processed""" required_config = Namespace() required_config.source.add_option( 'crashstorage_class', doc='the source storage class', default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage', from_string_converter=class_converter ) #-------------------------------------------------------------------------- def __init__(self, config, processor_name, quit_check_callback=None): self.config = config self.crash_store = config.crashstorage_class(config) #-------------------------------------------------------------------------- def close(self): pass #-------------------------------------------------------------------------- def __iter__(self): """an adapter that allows this class can serve as an iterator in a fetch_transform_save app""" for a_crash_id in self.crash_store.new_crashes(): yield ( (a_crash_id,), {'finished_func': partial( self.crash_store.ack_crash, a_crash_id )} ) #-------------------------------------------------------------------------- def __call__(self): return self.__iter__() ## Instruction: Correct docs on RabbitMQ crash source. ## Code After: from configman import Namespace, RequiredConfig from configman.converters import class_converter from functools import partial #============================================================================== class RMQNewCrashSource(RequiredConfig): """An iterable of crashes from RabbitMQ""" required_config = Namespace() required_config.source.add_option( 'crashstorage_class', doc='the source storage class', default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage', from_string_converter=class_converter ) #-------------------------------------------------------------------------- def __init__(self, config, processor_name, quit_check_callback=None): self.config = config self.crash_store = config.crashstorage_class(config) #-------------------------------------------------------------------------- def close(self): pass #-------------------------------------------------------------------------- def __iter__(self): """Return an iterator over crashes from RabbitMQ. Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg is a crash ID, and the kwargs contain only a callback function which the FTS app will call to send an ack to Rabbit after processing is complete. """ for a_crash_id in self.crash_store.new_crashes(): yield ( (a_crash_id,), {'finished_func': partial( self.crash_store.ack_crash, a_crash_id )} ) #-------------------------------------------------------------------------- def __call__(self): return self.__iter__()
... from functools import partial #============================================================================== class RMQNewCrashSource(RequiredConfig): """An iterable of crashes from RabbitMQ""" required_config = Namespace() required_config.source.add_option( 'crashstorage_class', ... #-------------------------------------------------------------------------- def __iter__(self): """Return an iterator over crashes from RabbitMQ. Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg is a crash ID, and the kwargs contain only a callback function which the FTS app will call to send an ack to Rabbit after processing is complete. """ for a_crash_id in self.crash_store.new_crashes(): yield ( (a_crash_id,), {'finished_func': partial( self.crash_store.ack_crash, a_crash_id ...
24fe59c0f5df9343337549eb4495d6ca0e1e58d1
iconizer/iconizer_main.py
iconizer/iconizer_main.py
from iconizer.console.launcher import CrossGuiLauncher from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory class Iconizer(object): def __init__(self): self.server = CrossGuiLauncher(PyQtGuiFactory()) self.server.start() self.server.start_cross_gui_launcher_no_return() def execute(self, app_descriptor_dict): #Send request to start a new app self.server.launch(app_descriptor_dict) def main(): Iconizer() if __name__ == '__main__': main()
from iconizer.console.launcher import CrossGuiLauncher from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory class Iconizer(object): def start_gui(self): self.server = CrossGuiLauncher(PyQtGuiFactory()) self.server.start() self.server.start_cross_gui_launcher_no_return() def execute(self, app_descriptor_dict): #Send request to start a new app self.server.launch(app_descriptor_dict) def main(): Iconizer().start_gui() if __name__ == '__main__': main()
Move GUI creation out of __init__ function.
Move GUI creation out of __init__ function.
Python
bsd-3-clause
weijia/iconizer
python
## Code Before: from iconizer.console.launcher import CrossGuiLauncher from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory class Iconizer(object): def __init__(self): self.server = CrossGuiLauncher(PyQtGuiFactory()) self.server.start() self.server.start_cross_gui_launcher_no_return() def execute(self, app_descriptor_dict): #Send request to start a new app self.server.launch(app_descriptor_dict) def main(): Iconizer() if __name__ == '__main__': main() ## Instruction: Move GUI creation out of __init__ function. ## Code After: from iconizer.console.launcher import CrossGuiLauncher from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory class Iconizer(object): def start_gui(self): self.server = CrossGuiLauncher(PyQtGuiFactory()) self.server.start() self.server.start_cross_gui_launcher_no_return() def execute(self, app_descriptor_dict): #Send request to start a new app self.server.launch(app_descriptor_dict) def main(): Iconizer().start_gui() if __name__ == '__main__': main()
... class Iconizer(object): def start_gui(self): self.server = CrossGuiLauncher(PyQtGuiFactory()) self.server.start() self.server.start_cross_gui_launcher_no_return() ... def main(): Iconizer().start_gui() if __name__ == '__main__': main() ...
b7e657134c21b62e78453b11f0745e0048e346bf
examples/simple_distribution.py
examples/simple_distribution.py
import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. users = ['user1', 'user2'] tasks = ['task1', 'task2'] preferences = [ [1, 2], [2, 1], ] # Run solver start_time = time.time() distributor = FairDistributor(users, tasks, preferences) output = distributor.distribute(output='problem.lp') elapsed_time = time.time() - start_time # Output print(output) if __name__ == '__main__': main()
import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. users = ['user1', 'user2'] tasks = ['task1', 'task2'] preferences = [ [1, 2], [2, 1], ] # Run solver distributor = FairDistributor(users, tasks, preferences) output = distributor.distribute(output='problem.lp') # Output print(output) if __name__ == '__main__': main()
Remove time metrics from the simple example
Remove time metrics from the simple example
Python
mit
Hackathonners/vania
python
## Code Before: import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. users = ['user1', 'user2'] tasks = ['task1', 'task2'] preferences = [ [1, 2], [2, 1], ] # Run solver start_time = time.time() distributor = FairDistributor(users, tasks, preferences) output = distributor.distribute(output='problem.lp') elapsed_time = time.time() - start_time # Output print(output) if __name__ == '__main__': main() ## Instruction: Remove time metrics from the simple example ## Code After: import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. users = ['user1', 'user2'] tasks = ['task1', 'task2'] preferences = [ [1, 2], [2, 1], ] # Run solver distributor = FairDistributor(users, tasks, preferences) output = distributor.distribute(output='problem.lp') # Output print(output) if __name__ == '__main__': main()
// ... existing code ... ] # Run solver distributor = FairDistributor(users, tasks, preferences) output = distributor.distribute(output='problem.lp') # Output print(output) // ... rest of the code ...
c82dee5e7aa8ced12906875b7f44ff7b7671eacb
telegram-bot-api/src/main/kotlin/com/github/telegram/domain/Update.kt
telegram-bot-api/src/main/kotlin/com/github/telegram/domain/Update.kt
package com.github.telegram.domain import com.google.gson.annotations.SerializedName as Name /** * This object represents an incoming update. * Only *one* of the optional parameters can be present in any given update. * * @property updateId The update‘s unique identifier. * Update identifiers start from a certain positive number and increase sequentially. * @property message New incoming message of any kind — text, photo, sticker, etc. * @property editedMessage New version of a message that is known to the bot and was edited. * @property inlineQuery New incoming inline query. * @property chosenInlineResult The result of an inline query that was chosen by a user and sent to their chat partner. * @property callbackQuery New incoming callback query. */ data class Update( @Name("update_id") val updateId: Long, @Name("message") val message: Message?, @Name("edited_message") val editedMessage: Message?, @Name("inline_query") val inlineQuery: InlineQuery?, @Name("chosen_inline_result") val chosenInlineResult: ChosenInlineResult?, @Name("callback_query") val callbackQuery: CallbackQuery?) { val senderId: Long get() { return when { message != null -> message.chat.id editedMessage != null -> editedMessage.chat.id inlineQuery != null -> inlineQuery.from.id chosenInlineResult != null -> chosenInlineResult.from.id callbackQuery != null -> callbackQuery.from.id else -> throw IllegalStateException("Everything is null.") } } }
package com.github.telegram.domain import com.google.gson.annotations.SerializedName as Name /** * This object represents an incoming update. * Only *one* of the optional parameters can be present in any given update. * * @property updateId The update‘s unique identifier. * Update identifiers start from a certain positive number and increase sequentially. * @property message New incoming message of any kind — text, photo, sticker, etc. * @property editedMessage New version of a message that is known to the bot and was edited. * @property inlineQuery New incoming inline query. * @property chosenInlineResult The result of an inline query that was chosen by a user and sent to their chat partner. * @property callbackQuery New incoming callback query. */ data class Update( @Name("update_id") val updateId: Long, @Name("message") val message: Message?, @Name("edited_message") val editedMessage: Message?, @Name("inline_query") val inlineQuery: InlineQuery?, @Name("chosen_inline_result") val chosenInlineResult: ChosenInlineResult?, @Name("callback_query") val callbackQuery: CallbackQuery?) { val senderId: Long get() { return when { message != null -> message.chat.id editedMessage != null -> editedMessage.chat.id inlineQuery != null -> inlineQuery.from.id chosenInlineResult != null -> chosenInlineResult.from.id callbackQuery != null -> callbackQuery.from.id else -> throw IllegalStateException("Cannot evaluate sender for update: $this") } } }
Add more detailed exception message.
Add more detailed exception message.
Kotlin
mit
denzelby/telegram-bot-bumblebee,fare1990/telegram-bot-bumblebee,fare1990/telegram-bot-bumblebee,fare1990/telegram-bot-bumblebee
kotlin
## Code Before: package com.github.telegram.domain import com.google.gson.annotations.SerializedName as Name /** * This object represents an incoming update. * Only *one* of the optional parameters can be present in any given update. * * @property updateId The update‘s unique identifier. * Update identifiers start from a certain positive number and increase sequentially. * @property message New incoming message of any kind — text, photo, sticker, etc. * @property editedMessage New version of a message that is known to the bot and was edited. * @property inlineQuery New incoming inline query. * @property chosenInlineResult The result of an inline query that was chosen by a user and sent to their chat partner. * @property callbackQuery New incoming callback query. */ data class Update( @Name("update_id") val updateId: Long, @Name("message") val message: Message?, @Name("edited_message") val editedMessage: Message?, @Name("inline_query") val inlineQuery: InlineQuery?, @Name("chosen_inline_result") val chosenInlineResult: ChosenInlineResult?, @Name("callback_query") val callbackQuery: CallbackQuery?) { val senderId: Long get() { return when { message != null -> message.chat.id editedMessage != null -> editedMessage.chat.id inlineQuery != null -> inlineQuery.from.id chosenInlineResult != null -> chosenInlineResult.from.id callbackQuery != null -> callbackQuery.from.id else -> throw IllegalStateException("Everything is null.") } } } ## Instruction: Add more detailed exception message. ## Code After: package com.github.telegram.domain import com.google.gson.annotations.SerializedName as Name /** * This object represents an incoming update. * Only *one* of the optional parameters can be present in any given update. * * @property updateId The update‘s unique identifier. * Update identifiers start from a certain positive number and increase sequentially. * @property message New incoming message of any kind — text, photo, sticker, etc. * @property editedMessage New version of a message that is known to the bot and was edited. * @property inlineQuery New incoming inline query. * @property chosenInlineResult The result of an inline query that was chosen by a user and sent to their chat partner. * @property callbackQuery New incoming callback query. */ data class Update( @Name("update_id") val updateId: Long, @Name("message") val message: Message?, @Name("edited_message") val editedMessage: Message?, @Name("inline_query") val inlineQuery: InlineQuery?, @Name("chosen_inline_result") val chosenInlineResult: ChosenInlineResult?, @Name("callback_query") val callbackQuery: CallbackQuery?) { val senderId: Long get() { return when { message != null -> message.chat.id editedMessage != null -> editedMessage.chat.id inlineQuery != null -> inlineQuery.from.id chosenInlineResult != null -> chosenInlineResult.from.id callbackQuery != null -> callbackQuery.from.id else -> throw IllegalStateException("Cannot evaluate sender for update: $this") } } }
# ... existing code ... inlineQuery != null -> inlineQuery.from.id chosenInlineResult != null -> chosenInlineResult.from.id callbackQuery != null -> callbackQuery.from.id else -> throw IllegalStateException("Cannot evaluate sender for update: $this") } } } # ... rest of the code ...
5f88686bdd089d67192f75eac9d3f46effad2983
linter.py
linter.py
"""This module exports the scss-lint plugin linter class.""" import os from SublimeLinter.lint import RubyLinter, util class Scss(RubyLinter): """Provides an interface to the scss-lint executable.""" syntax = ('css', 'sass', 'scss') cmd = 'ruby -S scss-lint' regex = r'^.+?:(?P<line>\d+) (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)' tempfile_suffix = 'scss' defaults = { '--include-linter:,': '', '--exclude-linter:,': '' } inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter') comment_re = r'^\s*/[/\*]' config_file = ('--config', '.scss-lint.yml', '~')
"""This module exports the scss-lint plugin linter class.""" import os from SublimeLinter.lint import RubyLinter, util class Scss(RubyLinter): """Provides an interface to the scss-lint executable.""" syntax = ('css', 'sass', 'scss') cmd = 'ruby -S scss-lint' regex = r'^.+?:(?P<line>\d+)(?::(?P<column>\d+))? (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)' tempfile_suffix = 'scss' defaults = { '--include-linter:,': '', '--exclude-linter:,': '' } inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter') comment_re = r'^\s*/[/\*]' config_file = ('--config', '.scss-lint.yml', '~')
Fix regex for different output from scss-lint 0.49.0
Fix regex for different output from scss-lint 0.49.0
Python
mit
attenzione/SublimeLinter-scss-lint
python
## Code Before: """This module exports the scss-lint plugin linter class.""" import os from SublimeLinter.lint import RubyLinter, util class Scss(RubyLinter): """Provides an interface to the scss-lint executable.""" syntax = ('css', 'sass', 'scss') cmd = 'ruby -S scss-lint' regex = r'^.+?:(?P<line>\d+) (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)' tempfile_suffix = 'scss' defaults = { '--include-linter:,': '', '--exclude-linter:,': '' } inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter') comment_re = r'^\s*/[/\*]' config_file = ('--config', '.scss-lint.yml', '~') ## Instruction: Fix regex for different output from scss-lint 0.49.0 ## Code After: """This module exports the scss-lint plugin linter class.""" import os from SublimeLinter.lint import RubyLinter, util class Scss(RubyLinter): """Provides an interface to the scss-lint executable.""" syntax = ('css', 'sass', 'scss') cmd = 'ruby -S scss-lint' regex = r'^.+?:(?P<line>\d+)(?::(?P<column>\d+))? (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)' tempfile_suffix = 'scss' defaults = { '--include-linter:,': '', '--exclude-linter:,': '' } inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter') comment_re = r'^\s*/[/\*]' config_file = ('--config', '.scss-lint.yml', '~')
# ... existing code ... syntax = ('css', 'sass', 'scss') cmd = 'ruby -S scss-lint' regex = r'^.+?:(?P<line>\d+)(?::(?P<column>\d+))? (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)' tempfile_suffix = 'scss' defaults = { '--include-linter:,': '', # ... rest of the code ...
dd3cc71bb09ab2fb265b3f4bdda69cb1880842c6
tests/utils_test.py
tests/utils_test.py
import unittest from zttf.utils import fixed_version, binary_search_parameters class TestUtils(unittest.TestCase): def test_fixed_version(self): cases = [ (0x00005000, 0.5), (0x00010000, 1.0), (0x00035000, 3.5), (0x00105000, 10.5) ] for case in cases: self.assertEqual(fixed_version(case[0]), case[1]) def test_binary_parameters(self): cases = { 39: (32, 5), 10: (8, 3), 19: (16, 4) } for n, result in cases.items(): self.assertEqual(binary_search_parameters(n), result)
import unittest import struct from zttf.utils import fixed_version, binary_search_parameters, ttf_checksum class TestUtils(unittest.TestCase): def test_fixed_version(self): cases = [ (0x00005000, 0.5), (0x00010000, 1.0), (0x00035000, 3.5), (0x00105000, 10.5) ] for case in cases: self.assertEqual(fixed_version(case[0]), case[1]) def test_binary_parameters(self): cases = { 39: (32, 5), 10: (8, 3), 19: (16, 4) } for n, result in cases.items(): self.assertEqual(binary_search_parameters(n), result) def test_checksum(self): data = struct.pack(">12I", *range(0, 12)) self.assertEqual(len(data), 48) self.assertEqual(ttf_checksum(data), 66) self.assertEqual(ttf_checksum(struct.pack(">12I", *range(1000, 13000, 1000))), 78000) self.assertEqual(ttf_checksum(struct.pack(">512I", *range(1024, 1024 * 2048, 4096))), 0x1FF80000)
Add a simple test for the checksum routine.
Add a simple test for the checksum routine.
Python
apache-2.0
zathras777/zttf
python
## Code Before: import unittest from zttf.utils import fixed_version, binary_search_parameters class TestUtils(unittest.TestCase): def test_fixed_version(self): cases = [ (0x00005000, 0.5), (0x00010000, 1.0), (0x00035000, 3.5), (0x00105000, 10.5) ] for case in cases: self.assertEqual(fixed_version(case[0]), case[1]) def test_binary_parameters(self): cases = { 39: (32, 5), 10: (8, 3), 19: (16, 4) } for n, result in cases.items(): self.assertEqual(binary_search_parameters(n), result) ## Instruction: Add a simple test for the checksum routine. ## Code After: import unittest import struct from zttf.utils import fixed_version, binary_search_parameters, ttf_checksum class TestUtils(unittest.TestCase): def test_fixed_version(self): cases = [ (0x00005000, 0.5), (0x00010000, 1.0), (0x00035000, 3.5), (0x00105000, 10.5) ] for case in cases: self.assertEqual(fixed_version(case[0]), case[1]) def test_binary_parameters(self): cases = { 39: (32, 5), 10: (8, 3), 19: (16, 4) } for n, result in cases.items(): self.assertEqual(binary_search_parameters(n), result) def test_checksum(self): data = struct.pack(">12I", *range(0, 12)) self.assertEqual(len(data), 48) self.assertEqual(ttf_checksum(data), 66) self.assertEqual(ttf_checksum(struct.pack(">12I", *range(1000, 13000, 1000))), 78000) self.assertEqual(ttf_checksum(struct.pack(">512I", *range(1024, 1024 * 2048, 4096))), 0x1FF80000)
# ... existing code ... import unittest import struct from zttf.utils import fixed_version, binary_search_parameters, ttf_checksum class TestUtils(unittest.TestCase): # ... modified code ... } for n, result in cases.items(): self.assertEqual(binary_search_parameters(n), result) def test_checksum(self): data = struct.pack(">12I", *range(0, 12)) self.assertEqual(len(data), 48) self.assertEqual(ttf_checksum(data), 66) self.assertEqual(ttf_checksum(struct.pack(">12I", *range(1000, 13000, 1000))), 78000) self.assertEqual(ttf_checksum(struct.pack(">512I", *range(1024, 1024 * 2048, 4096))), 0x1FF80000) # ... rest of the code ...
494fd9dd3cb526682e5cb6fabc12ce4263875aea
flask_slacker/__init__.py
flask_slacker/__init__.py
from slacker import Slacker as BaseSlacker __version__ = '0.0.1' class Slacker(object): def __init__(self, app=None, **kwargs): """Initialize the Slacker interface. :param app: Flask application """ if app is not None: self.init_app(app) def init_app(self, app, config=None): """ Initialize the app in Flask. """ if not (config is None or isinstance(config, dict)): raise ValueError("`config` must be an instance of dict or None") # register application within app app.extensions = getattr(app, 'extensions', {}) app.extensions['slack'] = BaseSlacker(**config)
from slacker import Slacker as BaseSlacker, DEFAULT_TIMEOUT __version__ = '0.0.1' class Slacker(object): def __init__(self, app=None): """Initialize the Slacker interface. :param app: Flask application """ if app is not None: self.init_app(app) def init_app(self, app): """ Initialize the app in Flask. """ app.config.setdefault('SLACKER_TIMEOUT', DEFAULT_TIMEOUT) if 'SLACKER_TOKEN' not in app.config: raise Exception('Missing SLACKER_TOKEN in your config.') token = app.config['SLACKER_TOKEN'] timeout = app.config['SLACKER_TIMEOUT'] # register application within app app.extensions = getattr(app, 'extensions', {}) app.extensions['slack'] = BaseSlacker(token, timeout=timeout)
Load app configs for Slacker
Load app configs for Slacker
Python
mit
mdsrosa/flask-slacker
python
## Code Before: from slacker import Slacker as BaseSlacker __version__ = '0.0.1' class Slacker(object): def __init__(self, app=None, **kwargs): """Initialize the Slacker interface. :param app: Flask application """ if app is not None: self.init_app(app) def init_app(self, app, config=None): """ Initialize the app in Flask. """ if not (config is None or isinstance(config, dict)): raise ValueError("`config` must be an instance of dict or None") # register application within app app.extensions = getattr(app, 'extensions', {}) app.extensions['slack'] = BaseSlacker(**config) ## Instruction: Load app configs for Slacker ## Code After: from slacker import Slacker as BaseSlacker, DEFAULT_TIMEOUT __version__ = '0.0.1' class Slacker(object): def __init__(self, app=None): """Initialize the Slacker interface. :param app: Flask application """ if app is not None: self.init_app(app) def init_app(self, app): """ Initialize the app in Flask. """ app.config.setdefault('SLACKER_TIMEOUT', DEFAULT_TIMEOUT) if 'SLACKER_TOKEN' not in app.config: raise Exception('Missing SLACKER_TOKEN in your config.') token = app.config['SLACKER_TOKEN'] timeout = app.config['SLACKER_TIMEOUT'] # register application within app app.extensions = getattr(app, 'extensions', {}) app.extensions['slack'] = BaseSlacker(token, timeout=timeout)
// ... existing code ... from slacker import Slacker as BaseSlacker, DEFAULT_TIMEOUT __version__ = '0.0.1' // ... modified code ... class Slacker(object): def __init__(self, app=None): """Initialize the Slacker interface. :param app: Flask application ... if app is not None: self.init_app(app) def init_app(self, app): """ Initialize the app in Flask. """ app.config.setdefault('SLACKER_TIMEOUT', DEFAULT_TIMEOUT) if 'SLACKER_TOKEN' not in app.config: raise Exception('Missing SLACKER_TOKEN in your config.') token = app.config['SLACKER_TOKEN'] timeout = app.config['SLACKER_TIMEOUT'] # register application within app app.extensions = getattr(app, 'extensions', {}) app.extensions['slack'] = BaseSlacker(token, timeout=timeout) // ... rest of the code ...
f4841c1b0ddecd27544e2fd36429fd72c102d162
Lib/fontTools/help/__main__.py
Lib/fontTools/help/__main__.py
"""Show this help""" import pkgutil import sys from setuptools import find_packages from pkgutil import iter_modules import fontTools import importlib def get_description(pkg): try: return __import__(pkg+".__main__",globals(),locals(),["__doc__"]).__doc__ except Exception as e: return None def show_help_list(): path = fontTools.__path__[0] for pkg in find_packages(path): qualifiedPkg = "fontTools."+pkg description = get_description(qualifiedPkg) if description: print("fontools %-10s %s" % (pkg, description)) pkgpath = path + '/' + qualifiedPkg.replace('.', '/') if (sys.version_info.major == 3 and sys.version_info.minor < 6): for _, name, ispkg in iter_modules([pkgpath]): if get_description(pkg+ '.' + name): modules.add(pkg + '.' + name) else: for info in iter_modules([pkgpath]): if get_description(pkg+ '.' + info.name): modules.add(pkg + '.' + info.name) if __name__ == '__main__': print("fonttools v%s\n" % fontTools.__version__) show_help_list()
"""Show this help""" import pkgutil import sys from setuptools import find_packages from pkgutil import iter_modules import fontTools import importlib def describe(pkg): try: description = __import__( "fontTools." + pkg + ".__main__", globals(), locals(), ["__doc__"] ).__doc__ print("fonttools %-10s %s" % (pkg, description), file=sys.stderr) except Exception as e: return None def show_help_list(): path = fontTools.__path__[0] for pkg in find_packages(path): qualifiedPkg = "fontTools." + pkg describe(pkg) pkgpath = path + "/" + qualifiedPkg.replace(".", "/") for info in iter_modules([pkgpath]): describe(pkg + "." + info.name) if __name__ == "__main__": print("fonttools v%s\n" % fontTools.__version__, file=sys.stderr) show_help_list()
Address feedback, reformat, simplify, fix bugs and typo
Address feedback, reformat, simplify, fix bugs and typo
Python
mit
googlefonts/fonttools,fonttools/fonttools
python
## Code Before: """Show this help""" import pkgutil import sys from setuptools import find_packages from pkgutil import iter_modules import fontTools import importlib def get_description(pkg): try: return __import__(pkg+".__main__",globals(),locals(),["__doc__"]).__doc__ except Exception as e: return None def show_help_list(): path = fontTools.__path__[0] for pkg in find_packages(path): qualifiedPkg = "fontTools."+pkg description = get_description(qualifiedPkg) if description: print("fontools %-10s %s" % (pkg, description)) pkgpath = path + '/' + qualifiedPkg.replace('.', '/') if (sys.version_info.major == 3 and sys.version_info.minor < 6): for _, name, ispkg in iter_modules([pkgpath]): if get_description(pkg+ '.' + name): modules.add(pkg + '.' + name) else: for info in iter_modules([pkgpath]): if get_description(pkg+ '.' + info.name): modules.add(pkg + '.' + info.name) if __name__ == '__main__': print("fonttools v%s\n" % fontTools.__version__) show_help_list() ## Instruction: Address feedback, reformat, simplify, fix bugs and typo ## Code After: """Show this help""" import pkgutil import sys from setuptools import find_packages from pkgutil import iter_modules import fontTools import importlib def describe(pkg): try: description = __import__( "fontTools." + pkg + ".__main__", globals(), locals(), ["__doc__"] ).__doc__ print("fonttools %-10s %s" % (pkg, description), file=sys.stderr) except Exception as e: return None def show_help_list(): path = fontTools.__path__[0] for pkg in find_packages(path): qualifiedPkg = "fontTools." + pkg describe(pkg) pkgpath = path + "/" + qualifiedPkg.replace(".", "/") for info in iter_modules([pkgpath]): describe(pkg + "." + info.name) if __name__ == "__main__": print("fonttools v%s\n" % fontTools.__version__, file=sys.stderr) show_help_list()
// ... existing code ... import importlib def describe(pkg): try: description = __import__( "fontTools." + pkg + ".__main__", globals(), locals(), ["__doc__"] ).__doc__ print("fonttools %-10s %s" % (pkg, description), file=sys.stderr) except Exception as e: return None def show_help_list(): path = fontTools.__path__[0] for pkg in find_packages(path): qualifiedPkg = "fontTools." + pkg describe(pkg) pkgpath = path + "/" + qualifiedPkg.replace(".", "/") for info in iter_modules([pkgpath]): describe(pkg + "." + info.name) if __name__ == "__main__": print("fonttools v%s\n" % fontTools.__version__, file=sys.stderr) show_help_list() // ... rest of the code ...
2fd6970bfaf3ef0ec339ad04a4b3d34ddbaaa631
src/test/java/steps/hooks/GlobalHooks.java
src/test/java/steps/hooks/GlobalHooks.java
/** * Created with IntelliJ IDEA. * User: jhasmanyquiroz * Date: 11/25/15 * Time: 12:12 PM * To change this template use File | Settings | File Templates. */ package steps.hooks; import cucumber.api.Scenario; import cucumber.api.java.After; import org.openqa.selenium.OutputType; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.WebDriverException; import static framework.BrowserManager.getInstance; public class GlobalHooks { @After public void embedScreenShot(Scenario scenario) { if (scenario.isFailed()) { try { System.out.println("#########################Take a snapshot#######################"); byte[] screenshot = ((TakesScreenshot)getInstance().getDriver()).getScreenshotAs(OutputType.BYTES); scenario.embed(screenshot, "image/png"); System.out.println("#######SCENARIO: "+scenario.getName()); } catch (WebDriverException somePlatformsDontSupportScreenshots) { System.err.println(somePlatformsDontSupportScreenshots.getMessage()); } } //if (scenario.getName().equalsIgnoreCase("")) } }
/** * Created with IntelliJ IDEA. * User: jhasmanyquiroz * Date: 11/25/15 * Time: 12:12 PM * To change this template use File | Settings | File Templates. */ package steps.hooks; import cucumber.api.Scenario; import cucumber.api.java.After; import org.openqa.selenium.OutputType; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.WebDriverException; import static framework.BrowserManager.getInstance; public class GlobalHooks { @After public void embedScreenShot(Scenario scenario) { if (scenario.isFailed()) { try { byte[] screenshot = ((TakesScreenshot)getInstance().getDriver()).getScreenshotAs(OutputType.BYTES); scenario.embed(screenshot, "image/png"); } catch (WebDriverException somePlatformsDontSupportScreenshots) { System.err.println(somePlatformsDontSupportScreenshots.getMessage()); } } //if (scenario.getName().equalsIgnoreCase("")) } }
Refactor the Global hooks file
Refactor the Global hooks file
Java
mit
j4z31/QADEV04-Coursera-GUI
java
## Code Before: /** * Created with IntelliJ IDEA. * User: jhasmanyquiroz * Date: 11/25/15 * Time: 12:12 PM * To change this template use File | Settings | File Templates. */ package steps.hooks; import cucumber.api.Scenario; import cucumber.api.java.After; import org.openqa.selenium.OutputType; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.WebDriverException; import static framework.BrowserManager.getInstance; public class GlobalHooks { @After public void embedScreenShot(Scenario scenario) { if (scenario.isFailed()) { try { System.out.println("#########################Take a snapshot#######################"); byte[] screenshot = ((TakesScreenshot)getInstance().getDriver()).getScreenshotAs(OutputType.BYTES); scenario.embed(screenshot, "image/png"); System.out.println("#######SCENARIO: "+scenario.getName()); } catch (WebDriverException somePlatformsDontSupportScreenshots) { System.err.println(somePlatformsDontSupportScreenshots.getMessage()); } } //if (scenario.getName().equalsIgnoreCase("")) } } ## Instruction: Refactor the Global hooks file ## Code After: /** * Created with IntelliJ IDEA. * User: jhasmanyquiroz * Date: 11/25/15 * Time: 12:12 PM * To change this template use File | Settings | File Templates. */ package steps.hooks; import cucumber.api.Scenario; import cucumber.api.java.After; import org.openqa.selenium.OutputType; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.WebDriverException; import static framework.BrowserManager.getInstance; public class GlobalHooks { @After public void embedScreenShot(Scenario scenario) { if (scenario.isFailed()) { try { byte[] screenshot = ((TakesScreenshot)getInstance().getDriver()).getScreenshotAs(OutputType.BYTES); scenario.embed(screenshot, "image/png"); } catch (WebDriverException somePlatformsDontSupportScreenshots) { System.err.println(somePlatformsDontSupportScreenshots.getMessage()); } } //if (scenario.getName().equalsIgnoreCase("")) } }
# ... existing code ... public void embedScreenShot(Scenario scenario) { if (scenario.isFailed()) { try { byte[] screenshot = ((TakesScreenshot)getInstance().getDriver()).getScreenshotAs(OutputType.BYTES); scenario.embed(screenshot, "image/png"); } catch (WebDriverException somePlatformsDontSupportScreenshots) { System.err.println(somePlatformsDontSupportScreenshots.getMessage()); } # ... rest of the code ...
0774eea1027bc9bb88b5289854aa26109f258712
great_expectations/exceptions.py
great_expectations/exceptions.py
class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message
class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message self.batch_kwargs = batch_kwargs
Add batch_kwargs to custom error
Add batch_kwargs to custom error
Python
apache-2.0
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
python
## Code Before: class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message ## Instruction: Add batch_kwargs to custom error ## Code After: class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message self.batch_kwargs = batch_kwargs
# ... existing code ... class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message self.batch_kwargs = batch_kwargs # ... rest of the code ...
4166bf21aa8ff9264724ef8101231557f40b80ef
production.py
production.py
from flask import Flask, render_template, jsonify, make_response, request, current_app from gevent import monkey from gevent import wsgi import app monkey.patch_all() app = Flask(__name__) server = wsgi.WSGIServer(('203.29.62.211', 5050), app) server.serve_forever()
from flask import Flask, render_template, jsonify, make_response, request, current_app from gevent import monkey from gevent import wsgi import app monkey.patch_all() app = Flask(__name__) server = wsgi.WSGIServer(('203.29.62.211', 5050), app) server.serve_forever() @app.route('/') def index(): return render_template('index.html')
Add one route so that our monitoring system stops thinking this system is down
Add one route so that our monitoring system stops thinking this system is down
Python
apache-2.0
ishgroup/lightbook,ishgroup/lightbook,ishgroup/lightbook
python
## Code Before: from flask import Flask, render_template, jsonify, make_response, request, current_app from gevent import monkey from gevent import wsgi import app monkey.patch_all() app = Flask(__name__) server = wsgi.WSGIServer(('203.29.62.211', 5050), app) server.serve_forever() ## Instruction: Add one route so that our monitoring system stops thinking this system is down ## Code After: from flask import Flask, render_template, jsonify, make_response, request, current_app from gevent import monkey from gevent import wsgi import app monkey.patch_all() app = Flask(__name__) server = wsgi.WSGIServer(('203.29.62.211', 5050), app) server.serve_forever() @app.route('/') def index(): return render_template('index.html')
# ... existing code ... server = wsgi.WSGIServer(('203.29.62.211', 5050), app) server.serve_forever() @app.route('/') def index(): return render_template('index.html') # ... rest of the code ...
9eb265fa2142b559b15063bc9322fc60b46a300b
mezzanine/project_template/deploy/gunicorn.conf.py
mezzanine/project_template/deploy/gunicorn.conf.py
from __future__ import unicode_literals import os bind = "127.0.0.1:%(gunicorn_port)s" workers = (os.sysconf("SC_NPROCESSORS_ONLN") * 2) + 1 loglevel = "error" proc_name = "%(proj_name)s"
from __future__ import unicode_literals import multiprocessing bind = "127.0.0.1:%(gunicorn_port)s" workers = multiprocessing.cpu_count() * 2 + 1 loglevel = "error" proc_name = "%(proj_name)s"
Update to use multiprocessing library
Update to use multiprocessing library
Python
bsd-2-clause
promil23/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,stephenmcd/mezzanine,agepoly/mezzanine,damnfine/mezzanine,mush42/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,stephenmcd/mezzanine,stephenmcd/mezzanine,emile2016/mezzanine,frankier/mezzanine,promil23/mezzanine,readevalprint/mezzanine,webounty/mezzanine,dovydas/mezzanine,ryneeverett/mezzanine,wbtuomela/mezzanine,tuxinhang1989/mezzanine,cccs-web/mezzanine,joshcartme/mezzanine,tuxinhang1989/mezzanine,joshcartme/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,dsanders11/mezzanine,dekomote/mezzanine-modeltranslation-backport,Kniyl/mezzanine,vladir/mezzanine,tuxinhang1989/mezzanine,agepoly/mezzanine,AlexHill/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,Kniyl/mezzanine,SoLoHiC/mezzanine,molokov/mezzanine,dustinrb/mezzanine,sjdines/mezzanine,gradel/mezzanine,saintbird/mezzanine,PegasusWang/mezzanine,cccs-web/mezzanine,PegasusWang/mezzanine,christianwgd/mezzanine,wyzex/mezzanine,adrian-the-git/mezzanine,sjdines/mezzanine,PegasusWang/mezzanine,agepoly/mezzanine,damnfine/mezzanine,eino-makitalo/mezzanine,frankchin/mezzanine,jerivas/mezzanine,viaregio/mezzanine,douglaskastle/mezzanine,douglaskastle/mezzanine,gradel/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,jjz/mezzanine,ZeroXn/mezzanine,biomassives/mezzanine,promil23/mezzanine,spookylukey/mezzanine,frankchin/mezzanine,fusionbox/mezzanine,frankchin/mezzanine,dsanders11/mezzanine,gradel/mezzanine,sjuxax/mezzanine,ZeroXn/mezzanine,viaregio/mezzanine,frankier/mezzanine,spookylukey/mezzanine,Skytorn86/mezzanine,jjz/mezzanine,saintbird/mezzanine,dsanders11/mezzanine,webounty/mezzanine,vladir/mezzanine,webounty/mezzanine,jerivas/mezzanine,readevalprint/mezzanine,wbtuomela/mezzanine,fusionbox/mezzanine,theclanks/mezzanine,ryneeverett/mezzanine,batpad/mezzanine,frankier/mezzanine,jjz/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,Cajoline/mezzanine,dustinrb/mezzanine,emile2016/mezzanine,viaregio/mezzanine,dovydas/mezzanine,biomassives/mezzanine,AlexHill/mezzanine,molokov/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,nikolas/mezzanine,ryneeverett/mezzanine,dekomote/mezzanine-modeltranslation-backport,dovydas/mezzanine,sjdines/mezzanine,vladir/mezzanine,adrian-the-git/mezzanine,emile2016/mezzanine,industrydive/mezzanine,saintbird/mezzanine,batpad/mezzanine,spookylukey/mezzanine,geodesign/mezzanine,ZeroXn/mezzanine,sjuxax/mezzanine,industrydive/mezzanine,geodesign/mezzanine,SoLoHiC/mezzanine,wyzex/mezzanine,wbtuomela/mezzanine,nikolas/mezzanine,readevalprint/mezzanine,dustinrb/mezzanine,industrydive/mezzanine,Cajoline/mezzanine,Kniyl/mezzanine,Skytorn86/mezzanine,wyzex/mezzanine,nikolas/mezzanine,christianwgd/mezzanine,dekomote/mezzanine-modeltranslation-backport,joshcartme/mezzanine,Cicero-Zhao/mezzanine,christianwgd/mezzanine,geodesign/mezzanine,theclanks/mezzanine,biomassives/mezzanine,theclanks/mezzanine
python
## Code Before: from __future__ import unicode_literals import os bind = "127.0.0.1:%(gunicorn_port)s" workers = (os.sysconf("SC_NPROCESSORS_ONLN") * 2) + 1 loglevel = "error" proc_name = "%(proj_name)s" ## Instruction: Update to use multiprocessing library ## Code After: from __future__ import unicode_literals import multiprocessing bind = "127.0.0.1:%(gunicorn_port)s" workers = multiprocessing.cpu_count() * 2 + 1 loglevel = "error" proc_name = "%(proj_name)s"
# ... existing code ... from __future__ import unicode_literals import multiprocessing bind = "127.0.0.1:%(gunicorn_port)s" workers = multiprocessing.cpu_count() * 2 + 1 loglevel = "error" proc_name = "%(proj_name)s" # ... rest of the code ...
d84a47b875af42da3491c771e461b0a8ca5556db
tests/test_models.py
tests/test_models.py
import pytest @pytest.mark.django_db def test_tinycontent_str(simple_content): assert "foobar" == str(simple_content) @pytest.mark.django_db def test_tinycontentfile_str(file_upload): assert "Foobar" == str(file_upload)
import pytest @pytest.mark.django_db def test_tinycontent_str(simple_content): assert "foobar" == str(simple_content) @pytest.mark.django_db def test_tinycontentfile_str(file_upload): assert "Foobar" == str(file_upload) @pytest.mark.django_db def test_tinycontentfile_slug(file_upload): assert "foobar" == file_upload.slug
Test the slug field is generated correctly
Test the slug field is generated correctly
Python
bsd-3-clause
dominicrodger/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,ad-m/django-tinycontent,dominicrodger/django-tinycontent,watchdogpolska/django-tinycontent
python
## Code Before: import pytest @pytest.mark.django_db def test_tinycontent_str(simple_content): assert "foobar" == str(simple_content) @pytest.mark.django_db def test_tinycontentfile_str(file_upload): assert "Foobar" == str(file_upload) ## Instruction: Test the slug field is generated correctly ## Code After: import pytest @pytest.mark.django_db def test_tinycontent_str(simple_content): assert "foobar" == str(simple_content) @pytest.mark.django_db def test_tinycontentfile_str(file_upload): assert "Foobar" == str(file_upload) @pytest.mark.django_db def test_tinycontentfile_slug(file_upload): assert "foobar" == file_upload.slug
... @pytest.mark.django_db def test_tinycontentfile_str(file_upload): assert "Foobar" == str(file_upload) @pytest.mark.django_db def test_tinycontentfile_slug(file_upload): assert "foobar" == file_upload.slug ...
a9092a0b5e42d7e306d197a8b638c7186646a97a
android/app/src/main/java/com/rnscm/SecretConfigManager.java
android/app/src/main/java/com/rnscm/SecretConfigManager.java
package com.rnscm; import com.facebook.react.bridge.NativeModule; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import java.util.Map; import java.util.HashMap; import java.lang.reflect.Field; public class SecretConfigManager extends ReactContextBaseJavaModule { public Class config; public SecretConfigManager(ReactApplicationContext reactContext, Class _config) { super(reactContext); config = _config; } @Override public String getName() { return "SecretConfigManager"; } private Map<String,String> configToMap() { Field[] declaredFields = config.getDeclaredFields(); Map<String, String> configMap = new HashMap<String, String>(); for (Field field : declaredFields) { if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) { String key = field.getName(); String val = (String)field.get(null); configMap.put(key, val); } } return configMap; } @ReactMethod public void getConfig(String name, Callback callback) { callback.invoke(configToMap()); } }
package com.rnscm; import com.facebook.react.bridge.NativeModule; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import java.util.Map; import java.util.HashMap; import java.lang.reflect.Field; public class SecretConfigManager extends ReactContextBaseJavaModule { public Class config; public SecretConfigManager(ReactApplicationContext reactContext, Class _config) { super(reactContext); config = _config; } @Override public String getName() { return "SecretConfigManager"; } private Map<String,String> configToMap() throws IllegalAccessException { Field[] declaredFields = config.getDeclaredFields(); Map<String, String> configMap = new HashMap<String, String>(); for (Field field : declaredFields) { if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) { String key = field.getName(); String val = (String)field.get(null); configMap.put(key, val); } } return configMap; } @ReactMethod public void getConfig(String name, Callback callback) { try { callback.invoke(configToMap()); } catch (IllegalAccessException err) { callback.invoke(err); } } }
Fix all errors in java code
Fix all errors in java code
Java
epl-1.0
savelichalex/friends-teach,savelichalex/friends-teach,savelichalex/friends-teach,savelichalex/friends-teach
java
## Code Before: package com.rnscm; import com.facebook.react.bridge.NativeModule; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import java.util.Map; import java.util.HashMap; import java.lang.reflect.Field; public class SecretConfigManager extends ReactContextBaseJavaModule { public Class config; public SecretConfigManager(ReactApplicationContext reactContext, Class _config) { super(reactContext); config = _config; } @Override public String getName() { return "SecretConfigManager"; } private Map<String,String> configToMap() { Field[] declaredFields = config.getDeclaredFields(); Map<String, String> configMap = new HashMap<String, String>(); for (Field field : declaredFields) { if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) { String key = field.getName(); String val = (String)field.get(null); configMap.put(key, val); } } return configMap; } @ReactMethod public void getConfig(String name, Callback callback) { callback.invoke(configToMap()); } } ## Instruction: Fix all errors in java code ## Code After: package com.rnscm; import com.facebook.react.bridge.NativeModule; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import java.util.Map; import java.util.HashMap; import java.lang.reflect.Field; public class SecretConfigManager extends ReactContextBaseJavaModule { public Class config; public SecretConfigManager(ReactApplicationContext reactContext, Class _config) { super(reactContext); config = _config; } @Override public String getName() { return "SecretConfigManager"; } private Map<String,String> configToMap() throws IllegalAccessException { Field[] declaredFields = config.getDeclaredFields(); Map<String, String> configMap = new HashMap<String, String>(); for (Field field : declaredFields) { if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) { String key = field.getName(); String val = (String)field.get(null); configMap.put(key, val); } } return configMap; } @ReactMethod public void getConfig(String name, Callback callback) { try { callback.invoke(configToMap()); } catch (IllegalAccessException err) { callback.invoke(err); } } }
# ... existing code ... return "SecretConfigManager"; } private Map<String,String> configToMap() throws IllegalAccessException { Field[] declaredFields = config.getDeclaredFields(); Map<String, String> configMap = new HashMap<String, String>(); for (Field field : declaredFields) { # ... modified code ... @ReactMethod public void getConfig(String name, Callback callback) { try { callback.invoke(configToMap()); } catch (IllegalAccessException err) { callback.invoke(err); } } } # ... rest of the code ...
427f02c7f6c93e15d219d975d337a97d74a88b42
convergence-tests/runall.py
convergence-tests/runall.py
import os import time import multiprocessing threads = 4 dev_null = "/dev/null" input_dir = "./convergence_inputs/" log_file = dev_null call = "nice -n 19 ionice -c2 -n7 ../build/main.x " call_end = " >> " + log_file syscall_arr = [] input_files = os.listdir(input_dir) if __name__ == "__main__": pool = multiprocessing.Pool(processes=threads) for fname in input_files: inp_path = input_dir + fname syscall = call + inp_path + call_end syscall_arr.append(syscall) if log_file is not dev_null: os.remove(log_file) start_time = time.time() pool.map(os.system, syscall_arr) pool.close() pool.join() end_time = time.time() print("Runtime: ", end_time-start_time)
import os import time import multiprocessing threads = 4 os.environ["OMP_NUM_THREADS"] = "1" dev_null = "/dev/null" input_dir = "./convergence_inputs/" log_file = "log.log" call = "nice -n 19 ionice -c2 -n7 ../build/main.x " call_end = " >> " + log_file syscall_arr = [] input_files = os.listdir(input_dir) if __name__ == "__main__": pool = multiprocessing.Pool(processes=threads) for fname in input_files: inp_path = input_dir + fname syscall = call + inp_path + call_end syscall_arr.append(syscall) if log_file is not dev_null: try: os.remove(log_file) except: pass start_time = time.time() pool.map(os.system, syscall_arr) pool.close() pool.join() end_time = time.time() print("Runtime: ", end_time-start_time)
Update parallel convergence test runs to not spawn OMP threads
Update parallel convergence test runs to not spawn OMP threads
Python
mit
kramer314/1d-vd-test,kramer314/1d-vd-test
python
## Code Before: import os import time import multiprocessing threads = 4 dev_null = "/dev/null" input_dir = "./convergence_inputs/" log_file = dev_null call = "nice -n 19 ionice -c2 -n7 ../build/main.x " call_end = " >> " + log_file syscall_arr = [] input_files = os.listdir(input_dir) if __name__ == "__main__": pool = multiprocessing.Pool(processes=threads) for fname in input_files: inp_path = input_dir + fname syscall = call + inp_path + call_end syscall_arr.append(syscall) if log_file is not dev_null: os.remove(log_file) start_time = time.time() pool.map(os.system, syscall_arr) pool.close() pool.join() end_time = time.time() print("Runtime: ", end_time-start_time) ## Instruction: Update parallel convergence test runs to not spawn OMP threads ## Code After: import os import time import multiprocessing threads = 4 os.environ["OMP_NUM_THREADS"] = "1" dev_null = "/dev/null" input_dir = "./convergence_inputs/" log_file = "log.log" call = "nice -n 19 ionice -c2 -n7 ../build/main.x " call_end = " >> " + log_file syscall_arr = [] input_files = os.listdir(input_dir) if __name__ == "__main__": pool = multiprocessing.Pool(processes=threads) for fname in input_files: inp_path = input_dir + fname syscall = call + inp_path + call_end syscall_arr.append(syscall) if log_file is not dev_null: try: os.remove(log_file) except: pass start_time = time.time() pool.map(os.system, syscall_arr) pool.close() pool.join() end_time = time.time() print("Runtime: ", end_time-start_time)
...
threads = 4
os.environ["OMP_NUM_THREADS"] = "1"

dev_null = "/dev/null"

input_dir = "./convergence_inputs/"
log_file = "log.log"

call = "nice -n 19 ionice -c2 -n7 ../build/main.x "
call_end = " >> " + log_file
...
        syscall_arr.append(syscall)

    if log_file is not dev_null:
        try:
            os.remove(log_file)
        except:
            pass

    start_time = time.time()
...
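A note on the change above: environment variables set in the parent script are inherited both by the pool workers and by the main.x processes they launch, so exporting OMP_NUM_THREADS=1 before the pool starts keeps each solver run single-threaded and avoids oversubscribing the machine with threads times processes. A minimal sketch of the same pattern, as a generic illustration only (the ./solver.x binary and input names are hypothetical, not from this repository):

import os
import multiprocessing

os.environ["OMP_NUM_THREADS"] = "1"  # inherited by workers and by programs they launch

def run_case(input_path):
    # each worker runs one single-threaded solver instance
    return os.system("./solver.x " + input_path)

if __name__ == "__main__":
    with multiprocessing.Pool(processes=4) as pool:
        pool.map(run_case, ["a.inp", "b.inp", "c.inp"])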
e2bb78a1587b7d5c0416c3632ca9674339826d55
src/yawf/creation.py
src/yawf/creation.py
from django.db import transaction

from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError


@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
    workflow = get_workflow(workflow_type)
    if workflow is None:
        raise WorkflowNotLoadedError(workflow_type)

    form = workflow.create_form_cls(raw_parameters)

    if form.is_valid():
        instance = workflow.instance_fabric(sender, form.cleaned_data)
        # Ensure that we will create, not update
        instance.id = None
        # Set workflow type
        setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
        instance.save()
        workflow.post_create_hook(sender, form.cleaned_data, instance)
        return instance
    else:
        raise CreateValidationError(form.errors)


def start_workflow(obj, sender, start_message_params):
    workflow = get_workflow_by_instance(obj)

    if isinstance(workflow.start_workflow, basestring):
        return dispatch.dispatch(obj, sender, workflow.start_workflow)
    elif callable(workflow.start_workflow):
        start_message_id = workflow.start_workflow(obj, sender)
        return dispatch.dispatch(obj, sender, start_message_id, start_message_params)
    else:
        return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
from django.db import transaction

from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError


@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
    workflow = get_workflow(workflow_type)
    if workflow is None:
        raise WorkflowNotLoadedError(workflow_type)

    form = workflow.create_form_cls(raw_parameters)

    if form.is_valid():
        instance = workflow.instance_fabric(sender, form.cleaned_data)
        # Ensure that we will create, not update
        instance.id = None
        # Set workflow type
        setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
        instance.save()
        workflow.post_create_hook(sender, form.cleaned_data, instance)
        return instance
    else:
        raise CreateValidationError(form.errors)


def start_workflow(obj, sender, start_message_params=None):
    if start_message_params is None:
        start_message_params = {}

    workflow = get_workflow_by_instance(obj)

    if isinstance(workflow.start_workflow, basestring):
        return dispatch.dispatch(obj, sender, workflow.start_workflow)
    elif callable(workflow.start_workflow):
        start_message_id = workflow.start_workflow(obj, sender)
        return dispatch.dispatch(obj, sender, start_message_id, start_message_params)
    else:
        return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
Make start_message_params optional in start_workflow()
Make start_message_params optional in start_workflow()
Python
mit
freevoid/yawf
python
## Code Before: from django.db import transaction from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR from yawf import get_workflow, get_workflow_by_instance from yawf import dispatch from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError @transaction.commit_on_success def create(workflow_type, sender, raw_parameters): workflow = get_workflow(workflow_type) if workflow is None: raise WorkflowNotLoadedError(workflow_type) form = workflow.create_form_cls(raw_parameters) if form.is_valid(): instance = workflow.instance_fabric(sender, form.cleaned_data) # Ensure that we will create, not update instance.id = None # Set workflow type setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type) instance.save() workflow.post_create_hook(sender, form.cleaned_data, instance) return instance else: raise CreateValidationError(form.errors) def start_workflow(obj, sender, start_message_params): workflow = get_workflow_by_instance(obj) if isinstance(workflow.start_workflow, basestring): return dispatch.dispatch(obj, sender, workflow.start_workflow) elif callable(workflow.start_workflow): start_message_id = workflow.start_workflow(obj, sender) return dispatch.dispatch(obj, sender, start_message_id, start_message_params) else: return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE) ## Instruction: Make start_message_params optional in start_workflow() ## Code After: from django.db import transaction from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR from yawf import get_workflow, get_workflow_by_instance from yawf import dispatch from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError @transaction.commit_on_success def create(workflow_type, sender, raw_parameters): workflow = get_workflow(workflow_type) if workflow is None: raise WorkflowNotLoadedError(workflow_type) form = workflow.create_form_cls(raw_parameters) if form.is_valid(): instance = workflow.instance_fabric(sender, form.cleaned_data) # Ensure that we will create, not update instance.id = None # Set workflow type setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type) instance.save() workflow.post_create_hook(sender, form.cleaned_data, instance) return instance else: raise CreateValidationError(form.errors) def start_workflow(obj, sender, start_message_params=None): if start_message_params is None: start_message_params = {} workflow = get_workflow_by_instance(obj) if isinstance(workflow.start_workflow, basestring): return dispatch.dispatch(obj, sender, workflow.start_workflow) elif callable(workflow.start_workflow): start_message_id = workflow.start_workflow(obj, sender) return dispatch.dispatch(obj, sender, start_message_id, start_message_params) else: return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
// ... existing code ...
        raise CreateValidationError(form.errors)


def start_workflow(obj, sender, start_message_params=None):
    if start_message_params is None:
        start_message_params = {}

    workflow = get_workflow_by_instance(obj)

    if isinstance(workflow.start_workflow, basestring):
// ... rest of the code ...
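The None default used above is the standard way to sidestep Python's mutable-default-argument pitfall: a literal {} or [] default is evaluated once at function definition time and then shared by every call. A minimal standalone illustration of the difference (generic example, the function names are illustrative and not part of yawf):

def append_shared(item, bucket=[]):   # one list object shared across calls
    bucket.append(item)
    return bucket

def append_fresh(item, bucket=None):  # new list per call unless one is passed in
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(append_shared(1), append_shared(2))  # [1, 2] [1, 2] -- both names point at the same list
print(append_fresh(1), append_fresh(2))    # [1] [2]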
d000a2e3991c54b319bc7166d9d178b739170a46
polling_stations/apps/data_collection/management/commands/import_sheffield.py
polling_stations/apps/data_collection/management/commands/import_sheffield.py
from data_collection.management.commands import BaseShpShpImporter

class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Sheffield
    """
    council_id = 'E08000019'
    districts_name = 'SCCPollingDistricts2015'
    stations_name = 'SCCPollingStations2015.shp'

    def district_record_to_dict(self, record):
        return {
            'internal_council_id': record[0],
            'name': record[1],
        }

    def station_record_to_dict(self, record):
        address = record[1]

        # remove postcode from end of address if present
        postcode_offset = -len(record[2])
        if address[postcode_offset:] == record[2]:
            address = address[:postcode_offset].strip()

        # remove trailing comma if present
        if address[-1:] == ',':
            address = address[:-1]

        # replace commas with \n
        address = "\n".join(map(lambda x: x.strip(), address.split(',')))

        return {
            'internal_council_id': record[0],
            'postcode' : record[2],
            'address' : address
        }
from data_collection.management.commands import BaseShpShpImporter

class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Sheffield
    """
    council_id = 'E08000019'
    districts_name = 'SCCPollingDistricts2015'
    stations_name = 'SCCPollingStations2015.shp'

    def district_record_to_dict(self, record):
        return {
            'internal_council_id': record[1],
            'extra_id': record[0],
            'name': record[1],
        }

    def station_record_to_dict(self, record):
        address = record[1]

        # remove postcode from end of address if present
        postcode_offset = -len(record[2])
        if address[postcode_offset:] == record[2]:
            address = address[:postcode_offset].strip()

        # remove trailing comma if present
        if address[-1:] == ',':
            address = address[:-1]

        # replace commas with \n
        address = "\n".join(map(lambda x: x.strip(), address.split(',')))

        return {
            'internal_council_id': record[0],
            'postcode' : record[2],
            'address' : address,
            'polling_district_id': record[-1]
        }
Add polling_district_id in Sheffield import script
Add polling_district_id in Sheffield import script
Python
bsd-3-clause
DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations
python
## Code Before: from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Sheffield """ council_id = 'E08000019' districts_name = 'SCCPollingDistricts2015' stations_name = 'SCCPollingStations2015.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[1], } def station_record_to_dict(self, record): address = record[1] # remove postcode from end of address if present postcode_offset = -len(record[2]) if address[postcode_offset:] == record[2]: address = address[:postcode_offset].strip() # remove trailing comma if present if address[-1:] == ',': address = address[:-1] # replace commas with \n address = "\n".join(map(lambda x: x.strip(), address.split(','))) return { 'internal_council_id': record[0], 'postcode' : record[2], 'address' : address } ## Instruction: Add polling_district_id in Sheffield import script ## Code After: from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Sheffield """ council_id = 'E08000019' districts_name = 'SCCPollingDistricts2015' stations_name = 'SCCPollingStations2015.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[1], 'extra_id': record[0], 'name': record[1], } def station_record_to_dict(self, record): address = record[1] # remove postcode from end of address if present postcode_offset = -len(record[2]) if address[postcode_offset:] == record[2]: address = address[:postcode_offset].strip() # remove trailing comma if present if address[-1:] == ',': address = address[:-1] # replace commas with \n address = "\n".join(map(lambda x: x.strip(), address.split(','))) return { 'internal_council_id': record[0], 'postcode' : record[2], 'address' : address, 'polling_district_id': record[-1] }
# ... existing code ...
    def district_record_to_dict(self, record):
        return {
            'internal_council_id': record[1],
            'extra_id': record[0],
            'name': record[1],
        }

    def station_record_to_dict(self, record):
# ... modified code ...
        return {
            'internal_council_id': record[0],
            'postcode' : record[2],
            'address' : address,
            'polling_district_id': record[-1]
        }
# ... rest of the code ...
8079ef89b47ac09b611a6bd34130df79f4372ad0
app/storage/src/main/java/com/fsck/k9/storage/migrations/MigrationTo75.kt
app/storage/src/main/java/com/fsck/k9/storage/migrations/MigrationTo75.kt
package com.fsck.k9.storage.migrations

import android.database.sqlite.SQLiteDatabase
import com.fsck.k9.mailstore.MigrationsHelper

internal class MigrationTo75(private val db: SQLiteDatabase, private val migrationsHelper: MigrationsHelper) {

    fun updateAccountWithSpecialFolderIds() {
        val account = migrationsHelper.account

        setSpecialFolderId(account.inboxFolder, account::setInboxFolderId)
        setSpecialFolderId("K9MAIL_INTERNAL_OUTBOX", account::setOutboxFolderId)
        setSpecialFolderId(account.draftsFolder, account::setDraftsFolderId)
        setSpecialFolderId(account.sentFolder, account::setSentFolderId)
        setSpecialFolderId(account.trashFolder, account::setTrashFolderId)
        setSpecialFolderId(account.archiveFolder, account::setArchiveFolderId)
        setSpecialFolderId(account.spamFolder, account::setSpamFolderId)
        setSpecialFolderId(account.autoExpandFolder, account::setAutoExpandFolderId)

        migrationsHelper.saveAccount()
    }

    private fun setSpecialFolderId(serverId: String?, setFolderId: (Long) -> Unit) {
        if (serverId == null) return

        db.query("folders", arrayOf("id"), "server_id = ?", arrayOf(serverId), null, null, null).use { cursor ->
            if (cursor.moveToFirst()) {
                val folderId = cursor.getLong(0)
                setFolderId(folderId)
            }
        }
    }
}
package com.fsck.k9.storage.migrations

import android.database.sqlite.SQLiteDatabase
import com.fsck.k9.mailstore.MigrationsHelper

internal class MigrationTo75(private val db: SQLiteDatabase, private val migrationsHelper: MigrationsHelper) {

    fun updateAccountWithSpecialFolderIds() {
        val account = migrationsHelper.account

        setSpecialFolderId(account.inboxFolder, account::setInboxFolderId)
        setSpecialFolderId("K9MAIL_INTERNAL_OUTBOX", account::setOutboxFolderId)
        setSpecialFolderId(account.draftsFolder, account::setDraftsFolderId)
        setSpecialFolderId(account.sentFolder, account::setSentFolderId)
        setSpecialFolderId(account.trashFolder, account::setTrashFolderId)
        setSpecialFolderId(account.archiveFolder, account::setArchiveFolderId)
        setSpecialFolderId(account.spamFolder, account::setSpamFolderId)
        setSpecialFolderId(account.autoExpandFolder, account::setAutoExpandFolderId)

        account.inboxFolder = null
        account.draftsFolder = null
        account.sentFolder = null
        account.trashFolder = null
        account.archiveFolder = null
        account.spamFolder = null
        account.autoExpandFolder = null

        migrationsHelper.saveAccount()
    }

    private fun setSpecialFolderId(serverId: String?, setFolderId: (Long) -> Unit) {
        if (serverId == null) return

        db.query("folders", arrayOf("id"), "server_id = ?", arrayOf(serverId), null, null, null).use { cursor ->
            if (cursor.moveToFirst()) {
                val folderId = cursor.getLong(0)
                setFolderId(folderId)
            }
        }
    }
}
Reset folder server IDs in Account
Reset folder server IDs in Account
Kotlin
apache-2.0
k9mail/k-9,cketti/k-9,k9mail/k-9,cketti/k-9,k9mail/k-9,cketti/k-9,cketti/k-9
kotlin
## Code Before: package com.fsck.k9.storage.migrations import android.database.sqlite.SQLiteDatabase import com.fsck.k9.mailstore.MigrationsHelper internal class MigrationTo75(private val db: SQLiteDatabase, private val migrationsHelper: MigrationsHelper) { fun updateAccountWithSpecialFolderIds() { val account = migrationsHelper.account setSpecialFolderId(account.inboxFolder, account::setInboxFolderId) setSpecialFolderId("K9MAIL_INTERNAL_OUTBOX", account::setOutboxFolderId) setSpecialFolderId(account.draftsFolder, account::setDraftsFolderId) setSpecialFolderId(account.sentFolder, account::setSentFolderId) setSpecialFolderId(account.trashFolder, account::setTrashFolderId) setSpecialFolderId(account.archiveFolder, account::setArchiveFolderId) setSpecialFolderId(account.spamFolder, account::setSpamFolderId) setSpecialFolderId(account.autoExpandFolder, account::setAutoExpandFolderId) migrationsHelper.saveAccount() } private fun setSpecialFolderId(serverId: String?, setFolderId: (Long) -> Unit) { if (serverId == null) return db.query("folders", arrayOf("id"), "server_id = ?", arrayOf(serverId), null, null, null).use { cursor -> if (cursor.moveToFirst()) { val folderId = cursor.getLong(0) setFolderId(folderId) } } } } ## Instruction: Reset folder server IDs in Account ## Code After: package com.fsck.k9.storage.migrations import android.database.sqlite.SQLiteDatabase import com.fsck.k9.mailstore.MigrationsHelper internal class MigrationTo75(private val db: SQLiteDatabase, private val migrationsHelper: MigrationsHelper) { fun updateAccountWithSpecialFolderIds() { val account = migrationsHelper.account setSpecialFolderId(account.inboxFolder, account::setInboxFolderId) setSpecialFolderId("K9MAIL_INTERNAL_OUTBOX", account::setOutboxFolderId) setSpecialFolderId(account.draftsFolder, account::setDraftsFolderId) setSpecialFolderId(account.sentFolder, account::setSentFolderId) setSpecialFolderId(account.trashFolder, account::setTrashFolderId) setSpecialFolderId(account.archiveFolder, account::setArchiveFolderId) setSpecialFolderId(account.spamFolder, account::setSpamFolderId) setSpecialFolderId(account.autoExpandFolder, account::setAutoExpandFolderId) account.inboxFolder = null account.draftsFolder = null account.sentFolder = null account.trashFolder = null account.archiveFolder = null account.spamFolder = null account.autoExpandFolder = null migrationsHelper.saveAccount() } private fun setSpecialFolderId(serverId: String?, setFolderId: (Long) -> Unit) { if (serverId == null) return db.query("folders", arrayOf("id"), "server_id = ?", arrayOf(serverId), null, null, null).use { cursor -> if (cursor.moveToFirst()) { val folderId = cursor.getLong(0) setFolderId(folderId) } } } }
# ... existing code ...
        setSpecialFolderId(account.spamFolder, account::setSpamFolderId)
        setSpecialFolderId(account.autoExpandFolder, account::setAutoExpandFolderId)

        account.inboxFolder = null
        account.draftsFolder = null
        account.sentFolder = null
        account.trashFolder = null
        account.archiveFolder = null
        account.spamFolder = null
        account.autoExpandFolder = null

        migrationsHelper.saveAccount()
    }
# ... rest of the code ...
12cc3ca56707248049ae519a3f2a44530ccdfa48
src/fitnesse/http/MockRequestBuilder.java
src/fitnesse/http/MockRequestBuilder.java
package fitnesse.http;

import fitnesse.http.MockRequest;
import fitnesse.http.Request;

public class MockRequestBuilder {
    protected String specification;
    private boolean noChunk = false;

    public MockRequestBuilder(String specification) {
        this.specification = specification;
        validate();
    }

    public Request build() {
        MockRequest request = new MockRequest();
        request.parseRequestUri(getCommand());
        if (hasCredentials()) {
            request.setCredentials(getUsername(), getPassword());
        }
        if (noChunk) {
            request.addInput("noChunk", true);
        }
        return request;
    }

    private String getCommand() {
        String actualCommand = null;
        if (hasCredentials())
            actualCommand = commandParts()[2];
        else
            actualCommand = specification;

        if (actualCommand.startsWith("/"))
            return actualCommand;
        else
            return "/" + actualCommand;
    }

    private boolean hasCredentials() {
        return (commandParts().length == 3);
    }

    private boolean hasNoCredentials() {
        return (commandParts().length == 1);
    }

    private void validate() {
        if (!hasCredentials() && !hasNoCredentials())
            throw new IllegalArgumentException("Command specification [" + specification + "] invalid. Format shold be /cmd or user:pass:/cmd");
    }

    private String[] commandParts() {
        return specification.split(":");
    }

    private String getUsername() {
        return commandParts()[0];
    }

    private String getPassword() {
        return commandParts()[1];
    }

    public MockRequestBuilder noChunk() {
        this.noChunk = true;
        return this;
    }
}
package fitnesse.http;

public class MockRequestBuilder {
    protected String specification;
    private boolean noChunk = false;

    public MockRequestBuilder(String specification) {
        this.specification = specification;
        validate();
    }

    public Request build() {
        MockRequest request = new MockRequest();
        request.parseRequestUri(getCommand());
        if (hasCredentials()) {
            request.setCredentials(getUsername(), getPassword());
        }
        if (noChunk) {
            request.addInput("nochunk", true);
        }
        return request;
    }

    private String getCommand() {
        String actualCommand = null;
        if (hasCredentials())
            actualCommand = commandParts()[2];
        else
            actualCommand = specification;

        if (actualCommand.startsWith("/"))
            return actualCommand;
        else
            return "/" + actualCommand;
    }

    private boolean hasCredentials() {
        return (commandParts().length == 3);
    }

    private boolean hasNoCredentials() {
        return (commandParts().length == 1);
    }

    private void validate() {
        if (!hasCredentials() && !hasNoCredentials())
            throw new IllegalArgumentException("Command specification [" + specification + "] invalid. Format shold be /cmd or user:pass:/cmd");
    }

    private String[] commandParts() {
        return specification.split(":");
    }

    private String getUsername() {
        return commandParts()[0];
    }

    private String getPassword() {
        return commandParts()[1];
    }

    public MockRequestBuilder noChunk() {
        this.noChunk = true;
        return this;
    }
}
Fix issue with chunked response from command line execution.
Fix issue with chunked response from command line execution.
Java
epl-1.0
amolenaar/fitnesse,rbevers/fitnesse,amolenaar/fitnesse,rbevers/fitnesse,hansjoachim/fitnesse,hansjoachim/fitnesse,amolenaar/fitnesse,rbevers/fitnesse,jdufner/fitnesse,jdufner/fitnesse,hansjoachim/fitnesse,jdufner/fitnesse
java
## Code Before: package fitnesse.http; import fitnesse.http.MockRequest; import fitnesse.http.Request; public class MockRequestBuilder { protected String specification; private boolean noChunk = false; public MockRequestBuilder(String specification) { this.specification = specification; validate(); } public Request build() { MockRequest request = new MockRequest(); request.parseRequestUri(getCommand()); if (hasCredentials()) { request.setCredentials(getUsername(), getPassword()); } if (noChunk) { request.addInput("noChunk", true); } return request; } private String getCommand() { String actualCommand = null; if (hasCredentials()) actualCommand = commandParts()[2]; else actualCommand = specification; if (actualCommand.startsWith("/")) return actualCommand; else return "/" + actualCommand; } private boolean hasCredentials() { return (commandParts().length == 3); } private boolean hasNoCredentials() { return (commandParts().length == 1); } private void validate() { if (!hasCredentials() && !hasNoCredentials()) throw new IllegalArgumentException("Command specification [" + specification + "] invalid. Format shold be /cmd or user:pass:/cmd"); } private String[] commandParts() { return specification.split(":"); } private String getUsername() { return commandParts()[0]; } private String getPassword() { return commandParts()[1]; } public MockRequestBuilder noChunk() { this.noChunk = true; return this; } } ## Instruction: Fix issue with chunked reponse from command line execution. ## Code After: package fitnesse.http; public class MockRequestBuilder { protected String specification; private boolean noChunk = false; public MockRequestBuilder(String specification) { this.specification = specification; validate(); } public Request build() { MockRequest request = new MockRequest(); request.parseRequestUri(getCommand()); if (hasCredentials()) { request.setCredentials(getUsername(), getPassword()); } if (noChunk) { request.addInput("nochunk", true); } return request; } private String getCommand() { String actualCommand = null; if (hasCredentials()) actualCommand = commandParts()[2]; else actualCommand = specification; if (actualCommand.startsWith("/")) return actualCommand; else return "/" + actualCommand; } private boolean hasCredentials() { return (commandParts().length == 3); } private boolean hasNoCredentials() { return (commandParts().length == 1); } private void validate() { if (!hasCredentials() && !hasNoCredentials()) throw new IllegalArgumentException("Command specification [" + specification + "] invalid. Format shold be /cmd or user:pass:/cmd"); } private String[] commandParts() { return specification.split(":"); } private String getUsername() { return commandParts()[0]; } private String getPassword() { return commandParts()[1]; } public MockRequestBuilder noChunk() { this.noChunk = true; return this; } }
...
package fitnesse.http;

public class MockRequestBuilder {
    protected String specification;
...
            request.setCredentials(getUsername(), getPassword());
        }
        if (noChunk) {
            request.addInput("nochunk", true);
        }
        return request;
    }
...
22a024856b6fa602ee9d6fd7fb6031dde359cc9c
pytablewriter/writer/text/_csv.py
pytablewriter/writer/text/_csv.py
from typing import List

import typepy

from ._text_writer import TextTableWriter


class CsvTableWriter(TextTableWriter):
    """
    A table writer class for character separated values format.

    The default separated character is a comma (``","``).

        :Example:
            :ref:`example-csv-table-writer`
    """

    FORMAT_NAME = "csv"

    @property
    def format_name(self) -> str:
        return self.FORMAT_NAME

    @property
    def support_split_write(self) -> bool:
        return True

    def __init__(self) -> None:
        super().__init__()

        self.indent_string = ""
        self.column_delimiter = ","
        self.is_padding = False
        self.is_formatting_float = False
        self.is_write_header_separator_row = False

        self._quoting_flags[typepy.Typecode.NULL_STRING] = False

    def _write_header(self) -> None:
        if typepy.is_empty_sequence(self.headers):
            return

        super()._write_header()

    def _get_opening_row_items(self) -> List[str]:
        return []

    def _get_value_row_separator_items(self) -> List[str]:
        return []

    def _get_closing_row_items(self) -> List[str]:
        return []
from typing import List

import typepy

from ._text_writer import TextTableWriter


class CsvTableWriter(TextTableWriter):
    """
    A table writer class for character separated values format.

    The default separated character is a comma (``","``).

        :Example:
            :ref:`example-csv-table-writer`
    """

    FORMAT_NAME = "csv"

    @property
    def format_name(self) -> str:
        return self.FORMAT_NAME

    @property
    def support_split_write(self) -> bool:
        return True

    def __init__(self) -> None:
        super().__init__()

        self._set_chars("")
        self.indent_string = ""
        self.column_delimiter = ","
        self.is_padding = False
        self.is_formatting_float = False
        self.is_write_header_separator_row = False

        self._quoting_flags[typepy.Typecode.NULL_STRING] = False

    def _write_header(self) -> None:
        if typepy.is_empty_sequence(self.headers):
            return

        super()._write_header()

    def _get_opening_row_items(self) -> List[str]:
        return []

    def _get_value_row_separator_items(self) -> List[str]:
        return []

    def _get_closing_row_items(self) -> List[str]:
        return []
Modify initialization to be more appropriate for the CsvTableWriter class
Modify initialization to be more appropriate for the CsvTableWriter class
Python
mit
thombashi/pytablewriter
python
## Code Before: from typing import List import typepy from ._text_writer import TextTableWriter class CsvTableWriter(TextTableWriter): """ A table writer class for character separated values format. The default separated character is a comma (``","``). :Example: :ref:`example-csv-table-writer` """ FORMAT_NAME = "csv" @property def format_name(self) -> str: return self.FORMAT_NAME @property def support_split_write(self) -> bool: return True def __init__(self) -> None: super().__init__() self.indent_string = "" self.column_delimiter = "," self.is_padding = False self.is_formatting_float = False self.is_write_header_separator_row = False self._quoting_flags[typepy.Typecode.NULL_STRING] = False def _write_header(self) -> None: if typepy.is_empty_sequence(self.headers): return super()._write_header() def _get_opening_row_items(self) -> List[str]: return [] def _get_value_row_separator_items(self) -> List[str]: return [] def _get_closing_row_items(self) -> List[str]: return [] ## Instruction: Modify initialization to be more properly for CsvTableWriter class ## Code After: from typing import List import typepy from ._text_writer import TextTableWriter class CsvTableWriter(TextTableWriter): """ A table writer class for character separated values format. The default separated character is a comma (``","``). :Example: :ref:`example-csv-table-writer` """ FORMAT_NAME = "csv" @property def format_name(self) -> str: return self.FORMAT_NAME @property def support_split_write(self) -> bool: return True def __init__(self) -> None: super().__init__() self._set_chars("") self.indent_string = "" self.column_delimiter = "," self.is_padding = False self.is_formatting_float = False self.is_write_header_separator_row = False self._quoting_flags[typepy.Typecode.NULL_STRING] = False def _write_header(self) -> None: if typepy.is_empty_sequence(self.headers): return super()._write_header() def _get_opening_row_items(self) -> List[str]: return [] def _get_value_row_separator_items(self) -> List[str]: return [] def _get_closing_row_items(self) -> List[str]: return []
...
    def __init__(self) -> None:
        super().__init__()

        self._set_chars("")
        self.indent_string = ""
        self.column_delimiter = ","
        self.is_padding = False
        self.is_formatting_float = False
        self.is_write_header_separator_row = False
...
96a5329dd36f176c6d1ac3c9ab4d68cf20224680
src/main/java/de/craften/plugins/educraft/luaapi/functions/WaitFunction.java
src/main/java/de/craften/plugins/educraft/luaapi/functions/WaitFunction.java
package de.craften.plugins.educraft.luaapi.functions;

import de.craften.plugins.educraft.luaapi.EduCraftApiFunction;
import org.luaj.vm2.LuaError;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;

/**
 * A function to wait for a given duration.
 */
public class WaitFunction extends EduCraftApiFunction {
    private static final long DEFAULT_TIME = 1000;

    @Override
    public Varargs execute(Varargs varargs) {
        try {
            Thread.sleep(varargs.optlong(1, DEFAULT_TIME));
        } catch (InterruptedException e) {
            throw new LuaError(e);
        }
        return LuaValue.NIL;
    }
}
package de.craften.plugins.educraft.luaapi.functions;

import de.craften.plugins.educraft.luaapi.EduCraftApiFunction;
import org.luaj.vm2.LuaError;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;

/**
 * A function to wait for a given duration.
 */
public class WaitFunction extends EduCraftApiFunction {
    private static final long DEFAULT_TIME = 1000;

    @Override
    public Varargs invoke(Varargs varargs) {
        try {
            Thread.sleep(varargs.optlong(1, DEFAULT_TIME) + getApi().getFunctionDelay());
        } catch (InterruptedException e) {
            throw new LuaError(e);
        }
        return LuaValue.NIL;
    }

    @Override
    public Varargs execute(Varargs varargs) {
        return LuaValue.NIL; //no-op (this is not invoked)
    }
}
Fix wait function blocking the main thread.
Fix wait function blocking the main thread.
Java
mit
leMaik/EduCraft
java
## Code Before: package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.luaj.vm2.LuaError; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; /** * A function to wait for a given duration. */ public class WaitFunction extends EduCraftApiFunction { private static final long DEFAULT_TIME = 1000; @Override public Varargs execute(Varargs varargs) { try { Thread.sleep(varargs.optlong(1, DEFAULT_TIME)); } catch (InterruptedException e) { throw new LuaError(e); } return LuaValue.NIL; } } ## Instruction: Fix wait function blocking the main thread. ## Code After: package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.luaj.vm2.LuaError; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; /** * A function to wait for a given duration. */ public class WaitFunction extends EduCraftApiFunction { private static final long DEFAULT_TIME = 1000; @Override public Varargs invoke(Varargs varargs) { try { Thread.sleep(varargs.optlong(1, DEFAULT_TIME) + getApi().getFunctionDelay()); } catch (InterruptedException e) { throw new LuaError(e); } return LuaValue.NIL; } @Override public Varargs execute(Varargs varargs) { return LuaValue.NIL; //no-op (this is not invoked) } }
...
    private static final long DEFAULT_TIME = 1000;

    @Override
    public Varargs invoke(Varargs varargs) {
        try {
            Thread.sleep(varargs.optlong(1, DEFAULT_TIME) + getApi().getFunctionDelay());
        } catch (InterruptedException e) {
            throw new LuaError(e);
        }
        return LuaValue.NIL;
    }

    @Override
    public Varargs execute(Varargs varargs) {
        return LuaValue.NIL; //no-op (this is not invoked)
    }
}
...
f96d26e8686cb2d1a15860414b90e48418e41f38
tests/integration/conftest.py
tests/integration/conftest.py
import pytest

import io
import contextlib
import tempfile
import shutil
import os

from xd.docker.client import *

DOCKER_HOST = os.environ.get('DOCKER_HOST', None)


@pytest.fixture(scope="module")
def docker(request):
    return DockerClient(host=DOCKER_HOST)


class StreamRedirector(object):

    def __init__(self):
        self.stream = io.StringIO()

    def redirect(self):
        return contextlib.redirect_stdout(self.stream)

    def get(self):
        return self.stream.getvalue()

    def getlines(self):
        return self.stream.getvalue().rstrip('\n').split('\n')

    def lastline(self):
        lines = self.getlines()
        if not lines:
            return None
        return lines[-1]


@pytest.fixture
def stdout():
    return StreamRedirector()


@pytest.fixture
def cleandir(request):
    newdir = tempfile.mkdtemp()
    os.chdir(newdir)
    def remove_cleandir():
        shutil.rmtree(newdir)
    request.addfinalizer(remove_cleandir)
    return newdir
import pytest

import io
import contextlib
import tempfile
import shutil
import os

from xd.docker.client import *

DOCKER_HOST = os.environ.get('DOCKER_HOST', None)


@pytest.fixture(scope="function")
def docker(request):
    os.system("for c in `docker ps -a -q`;do docker rm $c;done")
    os.system("for i in `docker images -q`;do docker rmi $i;done")
    return DockerClient(host=DOCKER_HOST)


class StreamRedirector(object):

    def __init__(self):
        self.stream = io.StringIO()

    def redirect(self):
        return contextlib.redirect_stdout(self.stream)

    def get(self):
        return self.stream.getvalue()

    def getlines(self):
        return self.stream.getvalue().rstrip('\n').split('\n')

    def lastline(self):
        lines = self.getlines()
        if not lines:
            return None
        return lines[-1]


@pytest.fixture
def stdout():
    return StreamRedirector()


@pytest.fixture
def cleandir(request):
    newdir = tempfile.mkdtemp()
    os.chdir(newdir)
    def remove_cleandir():
        shutil.rmtree(newdir)
    request.addfinalizer(remove_cleandir)
    return newdir
Purge images and containers before each test
tests: Purge images and containers before each test Signed-off-by: Esben Haabendal <[email protected]>
Python
mit
XD-embedded/xd-docker,XD-embedded/xd-docker,esben/xd-docker,esben/xd-docker
python
## Code Before: import pytest import io import contextlib import tempfile import shutil import os from xd.docker.client import * DOCKER_HOST = os.environ.get('DOCKER_HOST', None) @pytest.fixture(scope="module") def docker(request): return DockerClient(host=DOCKER_HOST) class StreamRedirector(object): def __init__(self): self.stream = io.StringIO() def redirect(self): return contextlib.redirect_stdout(self.stream) def get(self): return self.stream.getvalue() def getlines(self): return self.stream.getvalue().rstrip('\n').split('\n') def lastline(self): lines = self.getlines() if not lines: return None return lines[-1] @pytest.fixture def stdout(): return StreamRedirector() @pytest.fixture def cleandir(request): newdir = tempfile.mkdtemp() os.chdir(newdir) def remove_cleandir(): shutil.rmtree(newdir) request.addfinalizer(remove_cleandir) return newdir ## Instruction: tests: Purge images and containers before each test Signed-off-by: Esben Haabendal <[email protected]> ## Code After: import pytest import io import contextlib import tempfile import shutil import os from xd.docker.client import * DOCKER_HOST = os.environ.get('DOCKER_HOST', None) @pytest.fixture(scope="function") def docker(request): os.system("for c in `docker ps -a -q`;do docker rm $c;done") os.system("for i in `docker images -q`;do docker rmi $i;done") return DockerClient(host=DOCKER_HOST) class StreamRedirector(object): def __init__(self): self.stream = io.StringIO() def redirect(self): return contextlib.redirect_stdout(self.stream) def get(self): return self.stream.getvalue() def getlines(self): return self.stream.getvalue().rstrip('\n').split('\n') def lastline(self): lines = self.getlines() if not lines: return None return lines[-1] @pytest.fixture def stdout(): return StreamRedirector() @pytest.fixture def cleandir(request): newdir = tempfile.mkdtemp() os.chdir(newdir) def remove_cleandir(): shutil.rmtree(newdir) request.addfinalizer(remove_cleandir) return newdir
# ... existing code ...
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)


@pytest.fixture(scope="function")
def docker(request):
    os.system("for c in `docker ps -a -q`;do docker rm $c;done")
    os.system("for i in `docker images -q`;do docker rmi $i;done")
    return DockerClient(host=DOCKER_HOST)
# ... rest of the code ...
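Two things change in the record above: the fixture drops from scope="module" to the default function scope, so it runs before every test rather than once per module, and its body now shells out to remove all containers and images first. A small generic sketch of how fixture scope controls how often setup code runs (illustrative pytest example, not taken from xd-docker):

import pytest

calls = {"function": 0, "module": 0}

@pytest.fixture(scope="function")
def per_test():
    calls["function"] += 1  # runs before every test that requests it

@pytest.fixture(scope="module")
def per_module():
    calls["module"] += 1    # runs once for the whole module

def test_one(per_test, per_module):
    pass

def test_two(per_test, per_module):
    assert calls == {"function": 2, "module": 1}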
dfa50b46918b5d4401d0afbdcbadc126f51d4f99
src/main/java/org/konstructs/example/MyPlugin.java
src/main/java/org/konstructs/example/MyPlugin.java
package org.konstructs.example;

import akka.actor.UntypedActor;
import akka.actor.Props;
import konstructs.plugin.Config;
import konstructs.plugin.PluginConstructor;
import konstructs.api.SayFilter;
import konstructs.api.Say;

public class MyPlugin extends UntypedActor {

    private String responseText;
    private String pluginName;

    public MyPlugin(String pluginName, String responseText) {
        this.responseText = responseText;
        this.pluginName = pluginName;
    }

    public void onReceive(Object message) {
        if(message instanceof SayFilter) {
            SayFilter say = (SayFilter)message;
            String text = say.message().text();
            say.continueWith(new Say("Did you just say: " + text + "?"), getSender());
        } else {
            unhandled(message);
        }
    }

    @PluginConstructor
    public static Props props(String pluginName, @Config(key = "response-text") String responseText) {
        return Props.create(MyPlugin.class, pluginName, responseText);
    }
}
package org.konstructs.example;

import akka.actor.UntypedActor;
import akka.actor.Props;
import konstructs.plugin.Config;
import konstructs.plugin.PluginConstructor;
import konstructs.api.SayFilter;
import konstructs.api.Say;

public class MyPlugin extends UntypedActor {

    private String responseText;
    private String pluginName;

    public MyPlugin(String pluginName, String responseText) {
        this.responseText = responseText;
        this.pluginName = pluginName;
    }

    public void onReceive(Object message) {
        if(message instanceof SayFilter) {
            SayFilter say = (SayFilter)message;
            String text = say.message().text();
            say.continueWith(new Say(text + " <- " + responseText), getSender());
        } else {
            unhandled(message);
        }
    }

    @PluginConstructor
    public static Props props(String pluginName, @Config(key = "response-text") String responseText) {
        return Props.create(MyPlugin.class, pluginName, responseText);
    }
}
Use configured message to show configuration
Use configured message to show configuration
Java
mit
konstructs/server-plugin-forest
java
## Code Before: package org.konstructs.example; import akka.actor.UntypedActor; import akka.actor.Props; import konstructs.plugin.Config; import konstructs.plugin.PluginConstructor; import konstructs.api.SayFilter; import konstructs.api.Say; public class MyPlugin extends UntypedActor { private String responseText; private String pluginName; public MyPlugin(String pluginName, String responseText) { this.responseText = responseText; this.pluginName = pluginName; } public void onReceive(Object message) { if(message instanceof SayFilter) { SayFilter say = (SayFilter)message; String text = say.message().text(); say.continueWith(new Say("Did you just say: " + text + "?"), getSender()); } else { unhandled(message); } } @PluginConstructor public static Props props(String pluginName, @Config(key = "response-text") String responseText) { return Props.create(MyPlugin.class, pluginName, responseText); } } ## Instruction: Use configured message to show configuration ## Code After: package org.konstructs.example; import akka.actor.UntypedActor; import akka.actor.Props; import konstructs.plugin.Config; import konstructs.plugin.PluginConstructor; import konstructs.api.SayFilter; import konstructs.api.Say; public class MyPlugin extends UntypedActor { private String responseText; private String pluginName; public MyPlugin(String pluginName, String responseText) { this.responseText = responseText; this.pluginName = pluginName; } public void onReceive(Object message) { if(message instanceof SayFilter) { SayFilter say = (SayFilter)message; String text = say.message().text(); say.continueWith(new Say(text + " <- " + responseText), getSender()); } else { unhandled(message); } } @PluginConstructor public static Props props(String pluginName, @Config(key = "response-text") String responseText) { return Props.create(MyPlugin.class, pluginName, responseText); } }
// ... existing code ...
        if(message instanceof SayFilter) {
            SayFilter say = (SayFilter)message;
            String text = say.message().text();
            say.continueWith(new Say(text + " <- " + responseText), getSender());
        } else {
            unhandled(message);
        }
// ... rest of the code ...
27434395a599d7e42d2295056396937d89bb53a6
setup.py
setup.py
import sys

from setuptools import find_packages, setup


VERSION = '2.0.dev0'

install_requires = [
    'django-local-settings>=1.0a10',
    'stashward',
]

if sys.version_info[:2] < (3, 4):
    install_requires.append('enum34')

setup(
    name='django-arcutils',
    version=VERSION,
    url='https://github.com/PSU-OIT-ARC/django-arcutils',
    author='PSU - OIT - ARC',
    author_email='[email protected]',
    description='Common utilities used in ARC Django projects',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=install_requires,
    extras_require={
        'cas': [
            'django-cas-client>=1.2.0',
        ],
        'ldap': [
            'certifi>=2015.11.20.1',
            'ldap3>=1.0.3',
        ],
        'dev': [
            'django>=1.7,<1.9',
            'flake8',
            'ldap3',
            'mock',
            'model_mommy',
        ],
    },
    entry_points="""
    [console_scripts]
    arcutils = arcutils.__main__:main
    """,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Framework :: Django',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
)
import sys

from setuptools import find_packages, setup


VERSION = '2.0.dev0'

install_requires = [
    'django-local-settings>=1.0a10',
    'stashward',
]

if sys.version_info[:2] < (3, 4):
    install_requires.append('enum34')

setup(
    name='django-arcutils',
    version=VERSION,
    url='https://github.com/PSU-OIT-ARC/django-arcutils',
    author='PSU - OIT - ARC',
    author_email='[email protected]',
    description='Common utilities used in ARC Django projects',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=install_requires,
    extras_require={
        'cas': [
            'django-cas-client>=1.2.0',
        ],
        'ldap': [
            'certifi>=2015.11.20.1',
            'ldap3>=1.0.3',
        ],
        'dev': [
            'django>=1.7,<1.9',
            'djangorestframework>3.3',
            'flake8',
            'ldap3',
            'mock',
            'model_mommy',
        ],
    },
    entry_points="""
    [console_scripts]
    arcutils = arcutils.__main__:main
    """,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Framework :: Django',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
)
Add DRF as a dev dependency
Add DRF as a dev dependency
Python
mit
PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils
python
## Code Before: import sys from setuptools import find_packages, setup VERSION = '2.0.dev0' install_requires = [ 'django-local-settings>=1.0a10', 'stashward', ] if sys.version_info[:2] < (3, 4): install_requires.append('enum34') setup( name='django-arcutils', version=VERSION, url='https://github.com/PSU-OIT-ARC/django-arcutils', author='PSU - OIT - ARC', author_email='[email protected]', description='Common utilities used in ARC Django projects', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=install_requires, extras_require={ 'cas': [ 'django-cas-client>=1.2.0', ], 'ldap': [ 'certifi>=2015.11.20.1', 'ldap3>=1.0.3', ], 'dev': [ 'django>=1.7,<1.9', 'flake8', 'ldap3', 'mock', 'model_mommy', ], }, entry_points=""" [console_scripts] arcutils = arcutils.__main__:main """, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], ) ## Instruction: Add DRF as a dev dependency ## Code After: import sys from setuptools import find_packages, setup VERSION = '2.0.dev0' install_requires = [ 'django-local-settings>=1.0a10', 'stashward', ] if sys.version_info[:2] < (3, 4): install_requires.append('enum34') setup( name='django-arcutils', version=VERSION, url='https://github.com/PSU-OIT-ARC/django-arcutils', author='PSU - OIT - ARC', author_email='[email protected]', description='Common utilities used in ARC Django projects', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=install_requires, extras_require={ 'cas': [ 'django-cas-client>=1.2.0', ], 'ldap': [ 'certifi>=2015.11.20.1', 'ldap3>=1.0.3', ], 'dev': [ 'django>=1.7,<1.9', 'djangorestframework>3.3', 'flake8', 'ldap3', 'mock', 'model_mommy', ], }, entry_points=""" [console_scripts] arcutils = arcutils.__main__:main """, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
# ... existing code ...
        ],
        'dev': [
            'django>=1.7,<1.9',
            'djangorestframework>3.3',
            'flake8',
            'ldap3',
            'mock',
# ... rest of the code ...
fe4ce6dfa26c60747b6024fa9f6d991aa3b95614
scripts/codegen_driverwrappers/generate_driver_wrappers.py
scripts/codegen_driverwrappers/generate_driver_wrappers.py
import sys
import json
import os
import jinja2

def render(tpl_path):
    path, filename = os.path.split(tpl_path)
    return jinja2.Environment(
        loader=jinja2.FileSystemLoader(path or './')
    ).get_template(filename).render()

n = len(sys.argv)
if ( n != 3 ):
    sys.exit("The template file name and output file name are expected as arguments")

# set template file name, output file name
driver_wrapper_template_filename = sys.argv[1]
driver_wrapper_output_filename = sys.argv[2]

# render the template
result = render(driver_wrapper_template_filename)

# write output to file
outFile = open(driver_wrapper_output_filename,"w")
outFile.write(result)
outFile.close()
import sys
import json
import os
import jinja2

def render(tpl_path):
    path, filename = os.path.split(tpl_path)
    return jinja2.Environment(
        loader=jinja2.FileSystemLoader(path or './'),
        keep_trailing_newline=True,
    ).get_template(filename).render()

n = len(sys.argv)
if ( n != 3 ):
    sys.exit("The template file name and output file name are expected as arguments")

# set template file name, output file name
driver_wrapper_template_filename = sys.argv[1]
driver_wrapper_output_filename = sys.argv[2]

# render the template
result = render(driver_wrapper_template_filename)

# write output to file
outFile = open(driver_wrapper_output_filename,"w")
outFile.write(result)
outFile.close()
Fix trailing newline getting dropped
Fix trailing newline getting dropped Signed-off-by: Gilles Peskine <[email protected]>
Python
apache-2.0
Mbed-TLS/mbedtls,NXPmicro/mbedtls,NXPmicro/mbedtls,Mbed-TLS/mbedtls,NXPmicro/mbedtls,NXPmicro/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls
python
## Code Before: import sys import json import os import jinja2 def render(tpl_path): path, filename = os.path.split(tpl_path) return jinja2.Environment( loader=jinja2.FileSystemLoader(path or './') ).get_template(filename).render() n = len(sys.argv) if ( n != 3 ): sys.exit("The template file name and output file name are expected as arguments") # set template file name, output file name driver_wrapper_template_filename = sys.argv[1] driver_wrapper_output_filename = sys.argv[2] # render the template result = render(driver_wrapper_template_filename) # write output to file outFile = open(driver_wrapper_output_filename,"w") outFile.write(result) outFile.close() ## Instruction: Fix trailing newline getting dropped Signed-off-by: Gilles Peskine <[email protected]> ## Code After: import sys import json import os import jinja2 def render(tpl_path): path, filename = os.path.split(tpl_path) return jinja2.Environment( loader=jinja2.FileSystemLoader(path or './'), keep_trailing_newline=True, ).get_template(filename).render() n = len(sys.argv) if ( n != 3 ): sys.exit("The template file name and output file name are expected as arguments") # set template file name, output file name driver_wrapper_template_filename = sys.argv[1] driver_wrapper_output_filename = sys.argv[2] # render the template result = render(driver_wrapper_template_filename) # write output to file outFile = open(driver_wrapper_output_filename,"w") outFile.write(result) outFile.close()
...
def render(tpl_path):
    path, filename = os.path.split(tpl_path)
    return jinja2.Environment(
        loader=jinja2.FileSystemLoader(path or './'),
        keep_trailing_newline=True,
    ).get_template(filename).render()

n = len(sys.argv)
...
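The flag added in the record above addresses a Jinja2 default: the engine strips a single trailing newline from a template when it renders, so generated source files silently lose their final newline. A minimal standalone sketch of that behaviour, assuming Jinja2 2.7 or later where keep_trailing_newline is available:

import jinja2

template_source = "content line\n"

default_env = jinja2.Environment()
keeping_env = jinja2.Environment(keep_trailing_newline=True)

print(repr(default_env.from_string(template_source).render()))  # 'content line'
print(repr(keeping_env.from_string(template_source).render()))  # 'content line\n'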
d80c726fcf36a2dc439ce12717f9e88161501358
gather/node/models.py
gather/node/models.py
from flask.ext.sqlalchemy import models_committed

from gather.extensions import db, cache


class Node(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False, unique=True, index=True)
    slug = db.Column(db.String(100), nullable=False, unique=True, index=True)
    description = db.Column(db.String(500), nullable=True, default="")
    icon = db.Column(db.String(100), nullable=True, default="")

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<Node: %s>' % self.name

    @classmethod
    def query_all(cls):
        return cls.query.all()

    def to_dict(self):
        return dict(
            id=self.id,
            name=self.name,
            slug=self.slug,
            description=self.description,
            icon=self.icon
        )

    def save(self):
        db.session.add(self)
        db.session.commit()
        return self
from flask.ext.sqlalchemy import models_committed

from gather.extensions import db, cache


class Node(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False, unique=True, index=True)
    slug = db.Column(db.String(100), nullable=False, unique=True, index=True)
    description = db.Column(db.String(500), nullable=True, default="")
    icon = db.Column(db.String(100), nullable=True, default="")

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<Node: %s>' % self.name

    @classmethod
    def query_all(cls):
        return cls.query.order_by(Node.name.asc()).all()

    def to_dict(self):
        return dict(
            id=self.id,
            name=self.name,
            slug=self.slug,
            description=self.description,
            icon=self.icon
        )

    def save(self):
        db.session.add(self)
        db.session.commit()
        return self
Add ordering of nodes in topic creation form
Add ordering of nodes in topic creation form
Python
mit
whtsky/Gather,whtsky/Gather
python
## Code Before: from flask.ext.sqlalchemy import models_committed from gather.extensions import db, cache class Node(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), nullable=False, unique=True, index=True) slug = db.Column(db.String(100), nullable=False, unique=True, index=True) description = db.Column(db.String(500), nullable=True, default="") icon = db.Column(db.String(100), nullable=True, default="") def __str__(self): return self.name def __repr__(self): return '<Node: %s>' % self.name @classmethod def query_all(cls): return cls.query.all() def to_dict(self): return dict( id=self.id, name=self.name, slug=self.slug, description=self.description, icon=self.icon ) def save(self): db.session.add(self) db.session.commit() return self ## Instruction: Add order in nodes in topic creation form ## Code After: from flask.ext.sqlalchemy import models_committed from gather.extensions import db, cache class Node(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), nullable=False, unique=True, index=True) slug = db.Column(db.String(100), nullable=False, unique=True, index=True) description = db.Column(db.String(500), nullable=True, default="") icon = db.Column(db.String(100), nullable=True, default="") def __str__(self): return self.name def __repr__(self): return '<Node: %s>' % self.name @classmethod def query_all(cls): return cls.query.order_by(Node.name.asc()).all() def to_dict(self): return dict( id=self.id, name=self.name, slug=self.slug, description=self.description, icon=self.icon ) def save(self): db.session.add(self) db.session.commit() return self
// ... existing code ...
    @classmethod
    def query_all(cls):
        return cls.query.order_by(Node.name.asc()).all()

    def to_dict(self):
        return dict(
// ... rest of the code ...
30470437a86a58e5d89167f24206227737a04cf8
src/integration_tests/test_validation.py
src/integration_tests/test_validation.py
import pytest
from tests import base
from buildercore import cfngen, project
import logging

LOG = logging.getLogger(__name__)

logging.disable(logging.NOTSET) # re-enables logging during integration testing

# Depends on talking to AWS.
class TestValidationFixtures(base.BaseCase):
    def test_validation(self):
        "dummy projects and their alternative configurations pass validation"
        for pname in project.aws_projects().keys():
            cfngen.validate_project(pname)

class TestValidationElife():
    def setUp(self):
        # HERE BE DRAGONS
        # resets the testing config.SETTINGS_FILE we set in the base.BaseCase class
        base.switch_out_test_settings()

    def tearDown(self):
        base.switch_in_test_settings()

    @pytest.mark.parametrize("project_name", project.aws_projects().keys())
    def test_validation_elife_projects(self, project_name, filter_project_name):
        "elife projects (and their alternative configurations) that come with the builder pass validation"
        if filter_project_name:
            if project_name != filter_project_name:
                pytest.skip("Filtered out through filter_project_name")
        cfngen.validate_project(project_name)
import pytest
from tests import base
from buildercore import cfngen, project
import logging

LOG = logging.getLogger(__name__)

logging.disable(logging.NOTSET) # re-enables logging during integration testing

# Depends on talking to AWS.
class TestValidationFixtures(base.BaseCase):
    def test_validation(self):
        "dummy projects and their alternative configurations pass validation"
        for pname in project.aws_projects().keys():
            cfngen.validate_project(pname)

class TestValidationElife():
    @classmethod
    def setup_class(cls):
        # HERE BE DRAGONS
        # resets the testing config.SETTINGS_FILE we set in the base.BaseCase class
        base.switch_out_test_settings()

    @classmethod
    def teardown_class(cls):
        base.switch_in_test_settings()

    @pytest.mark.parametrize("project_name", project.aws_projects().keys())
    def test_validation_elife_projects(self, project_name, filter_project_name):
        "elife projects (and their alternative configurations) that come with the builder pass validation"
        if filter_project_name:
            if project_name != filter_project_name:
                pytest.skip("Filtered out through filter_project_name")
        cfngen.validate_project(project_name)
Correct pytest setup and teardown
Correct pytest setup and teardown
Python
mit
elifesciences/builder,elifesciences/builder
python
## Code Before: import pytest from tests import base from buildercore import cfngen, project import logging LOG = logging.getLogger(__name__) logging.disable(logging.NOTSET) # re-enables logging during integration testing # Depends on talking to AWS. class TestValidationFixtures(base.BaseCase): def test_validation(self): "dummy projects and their alternative configurations pass validation" for pname in project.aws_projects().keys(): cfngen.validate_project(pname) class TestValidationElife(): def setUp(self): # HERE BE DRAGONS # resets the testing config.SETTINGS_FILE we set in the base.BaseCase class base.switch_out_test_settings() def tearDown(self): base.switch_in_test_settings() @pytest.mark.parametrize("project_name", project.aws_projects().keys()) def test_validation_elife_projects(self, project_name, filter_project_name): "elife projects (and their alternative configurations) that come with the builder pass validation" if filter_project_name: if project_name != filter_project_name: pytest.skip("Filtered out through filter_project_name") cfngen.validate_project(project_name) ## Instruction: Correct pytest setup and teardown ## Code After: import pytest from tests import base from buildercore import cfngen, project import logging LOG = logging.getLogger(__name__) logging.disable(logging.NOTSET) # re-enables logging during integration testing # Depends on talking to AWS. class TestValidationFixtures(base.BaseCase): def test_validation(self): "dummy projects and their alternative configurations pass validation" for pname in project.aws_projects().keys(): cfngen.validate_project(pname) class TestValidationElife(): @classmethod def setup_class(cls): # HERE BE DRAGONS # resets the testing config.SETTINGS_FILE we set in the base.BaseCase class base.switch_out_test_settings() @classmethod def teardown_class(cls): base.switch_in_test_settings() @pytest.mark.parametrize("project_name", project.aws_projects().keys()) def test_validation_elife_projects(self, project_name, filter_project_name): "elife projects (and their alternative configurations) that come with the builder pass validation" if filter_project_name: if project_name != filter_project_name: pytest.skip("Filtered out through filter_project_name") cfngen.validate_project(project_name)
// ... existing code ...
            cfngen.validate_project(pname)

class TestValidationElife():
    @classmethod
    def setup_class(cls):
        # HERE BE DRAGONS
        # resets the testing config.SETTINGS_FILE we set in the base.BaseCase class
        base.switch_out_test_settings()

    @classmethod
    def teardown_class(cls):
        base.switch_in_test_settings()

    @pytest.mark.parametrize("project_name", project.aws_projects().keys())
// ... rest of the code ...
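The rename above matters because the unittest-style setUp/tearDown names are only invoked automatically on classes derived from unittest.TestCase; on a plain class, pytest looks for its own xunit-style hooks instead. A small sketch of the hook names pytest recognises on a plain test class (generic illustration, the class and attribute names are not from builder):

class TestExample:
    @classmethod
    def setup_class(cls):
        cls.resource = "opened once for the whole class"

    def setup_method(self, method):
        self.counter = 0  # runs before every test method

    def test_counter_starts_at_zero(self):
        assert self.counter == 0
        assert self.resource.startswith("opened")

    def teardown_method(self, method):
        pass  # runs after every test method

    @classmethod
    def teardown_class(cls):
        cls.resource = None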
237a66191295cce2cd52d78bcdb7cbe57e399e56
awx/main/management/commands/remove_instance.py
awx/main/management/commands/remove_instance.py
from django.core.management.base import CommandError

from awx.main.management.commands._base_instance import BaseCommandInstance

from awx.main.models import Instance

instance_str = BaseCommandInstance.instance_str


class Command(BaseCommandInstance):
    """Internal tower command.
    Remove an existing instance from the HA instance table.

    This command is idempotent.

    This command will error out in the following conditions:

      * Attempting to remove a primary instance.
    """
    def __init__(self):
        super(Command, self).__init__()
        self.include_option_hostname_uuid_find()

    def handle(self, *args, **options):
        super(Command, self).handle(*args, **options)

        # Is there an existing record for this machine? If so, retrieve that record and look for issues.
        try:
            # Get the instance.
            instance = Instance.objects.get(**self.get_unique_fields())

            # Sanity check: Do not remove the primary instance.
            if instance.primary:
                raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))

            # Remove the instance.
            instance.delete()
            print('Successfully removed instance %s.' % instance_str(instance))
        except Instance.DoesNotExist:
            print('No matching instance found to remove.')
from django.core.management.base import CommandError from awx.main.management.commands._base_instance import BaseCommandInstance from awx.main.models import Instance instance_str = BaseCommandInstance.instance_str class Command(BaseCommandInstance): """Internal tower command. Remove an existing instance from the HA instance table. This command is idempotent. This command will error out in the following conditions: * Attempting to remove a primary instance. """ def __init__(self): super(Command, self).__init__() self.include_option_hostname_uuid_find() def handle(self, *args, **options): super(Command, self).handle(*args, **options) # Is there an existing record for this machine? If so, retrieve that record and look for issues. try: # Get the instance. instance = Instance.objects.get(**self.get_unique_fields()) # Sanity check: Do not remove the primary instance. if instance.primary: raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance)) # Remove the instance. instance.delete() print('Successfully removed instance %s.' % instance_str(instance)) except Instance.DoesNotExist: print('No matching instance found to remove.')
Fix verbiage around why we are disallowing removing a primary
Fix verbiage around why we are disallowing removing a primary
Python
apache-2.0
wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
python
## Code Before: from django.core.management.base import CommandError from awx.main.management.commands._base_instance import BaseCommandInstance from awx.main.models import Instance instance_str = BaseCommandInstance.instance_str class Command(BaseCommandInstance): """Internal tower command. Remove an existing instance from the HA instance table. This command is idempotent. This command will error out in the following conditions: * Attempting to remove a primary instance. """ def __init__(self): super(Command, self).__init__() self.include_option_hostname_uuid_find() def handle(self, *args, **options): super(Command, self).handle(*args, **options) # Is there an existing record for this machine? If so, retrieve that record and look for issues. try: # Get the instance. instance = Instance.objects.get(**self.get_unique_fields()) # Sanity check: Do not remove the primary instance. if instance.primary: raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance)) # Remove the instance. instance.delete() print('Successfully removed instance %s.' % instance_str(instance)) except Instance.DoesNotExist: print('No matching instance found to remove.') ## Instruction: Fix verbage around why we are disallowing removing a primary ## Code After: from django.core.management.base import CommandError from awx.main.management.commands._base_instance import BaseCommandInstance from awx.main.models import Instance instance_str = BaseCommandInstance.instance_str class Command(BaseCommandInstance): """Internal tower command. Remove an existing instance from the HA instance table. This command is idempotent. This command will error out in the following conditions: * Attempting to remove a primary instance. """ def __init__(self): super(Command, self).__init__() self.include_option_hostname_uuid_find() def handle(self, *args, **options): super(Command, self).handle(*args, **options) # Is there an existing record for this machine? If so, retrieve that record and look for issues. try: # Get the instance. instance = Instance.objects.get(**self.get_unique_fields()) # Sanity check: Do not remove the primary instance. if instance.primary: raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance)) # Remove the instance. instance.delete() print('Successfully removed instance %s.' % instance_str(instance)) except Instance.DoesNotExist: print('No matching instance found to remove.')
# ... existing code ... # Sanity check: Do not remove the primary instance. if instance.primary: raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance)) # Remove the instance. instance.delete() # ... rest of the code ...
bc8356bb8359e531c793a2c9fc4a0e46c34361f3
src/xenia/base/bit_field.h
src/xenia/base/bit_field.h
/** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2017 Ben Vanik. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_BASE_BIT_FIELD_H_ #define XENIA_BASE_BIT_FIELD_H_ #include <cstdint> #include <cstdlib> #include <type_traits> namespace xe { // Bitfield, where position starts at the LSB. template <typename T, size_t position, size_t n_bits> struct bf { bf() = default; inline operator T() const { return value(); } inline T value() const { return static_cast<T>((storage & mask()) >> position); } // For enum values, we strip them down to an underlying type. typedef typename std::conditional<std::is_enum<T>::value, std::underlying_type<T>, std::identity<T>>::type::type value_type; inline value_type mask() const { return (((value_type)~0) >> (8 * sizeof(value_type) - n_bits)) << position; } value_type storage; }; } // namespace xe #endif // XENIA_BASE_BIT_FIELD_H_
/** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2017 Ben Vanik. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_BASE_BIT_FIELD_H_ #define XENIA_BASE_BIT_FIELD_H_ #include <cstdint> #include <cstdlib> #include <type_traits> namespace xe { // Bitfield, where position starts at the LSB. template <typename T, size_t position, size_t n_bits> struct bf { bf() = default; inline operator T() const { return value(); } inline T value() const { return static_cast<T>((storage & mask()) >> position); } // For enum values, we strip them down to an underlying type. typedef typename std::conditional<std::is_enum<T>::value, std::underlying_type<T>, std::remove_reference<T>>::type::type value_type; inline value_type mask() const { return (((value_type)~0) >> (8 * sizeof(value_type) - n_bits)) << position; } value_type storage; }; } // namespace xe #endif // XENIA_BASE_BIT_FIELD_H_
Remove usage of non standard identity struct
Remove usage of non standard identity struct std::identity was removed from the standards, but Visual Studio kept it as an extension. Replace it by std::remove_reference, which does just a little bit more than std::identity does, but without impact in this case.
C
bsd-3-clause
sephiroth99/xenia,sephiroth99/xenia,sephiroth99/xenia
c
## Code Before: /** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2017 Ben Vanik. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_BASE_BIT_FIELD_H_ #define XENIA_BASE_BIT_FIELD_H_ #include <cstdint> #include <cstdlib> #include <type_traits> namespace xe { // Bitfield, where position starts at the LSB. template <typename T, size_t position, size_t n_bits> struct bf { bf() = default; inline operator T() const { return value(); } inline T value() const { return static_cast<T>((storage & mask()) >> position); } // For enum values, we strip them down to an underlying type. typedef typename std::conditional<std::is_enum<T>::value, std::underlying_type<T>, std::identity<T>>::type::type value_type; inline value_type mask() const { return (((value_type)~0) >> (8 * sizeof(value_type) - n_bits)) << position; } value_type storage; }; } // namespace xe #endif // XENIA_BASE_BIT_FIELD_H_ ## Instruction: Remove usage of non standard identity struct std::identity was removed from the standards, but Visual Studio kept it as an extension. Replace it by std::remove_reference, which does just a little bit more than std::identity does, but without impact in this case. ## Code After: /** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2017 Ben Vanik. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_BASE_BIT_FIELD_H_ #define XENIA_BASE_BIT_FIELD_H_ #include <cstdint> #include <cstdlib> #include <type_traits> namespace xe { // Bitfield, where position starts at the LSB. template <typename T, size_t position, size_t n_bits> struct bf { bf() = default; inline operator T() const { return value(); } inline T value() const { return static_cast<T>((storage & mask()) >> position); } // For enum values, we strip them down to an underlying type. typedef typename std::conditional<std::is_enum<T>::value, std::underlying_type<T>, std::remove_reference<T>>::type::type value_type; inline value_type mask() const { return (((value_type)~0) >> (8 * sizeof(value_type) - n_bits)) << position; } value_type storage; }; } // namespace xe #endif // XENIA_BASE_BIT_FIELD_H_
... // For enum values, we strip them down to an underlying type. typedef typename std::conditional<std::is_enum<T>::value, std::underlying_type<T>, std::remove_reference<T>>::type::type value_type; inline value_type mask() const { return (((value_type)~0) >> (8 * sizeof(value_type) - n_bits)) << position; } ...
288127c575c7672e3a41d7ada360d56a4853f279
scripts/examples/14-WiFi-Shield/fw_update.py
scripts/examples/14-WiFi-Shield/fw_update.py
import network # Init wlan module in Download mode. wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE) # Start the firmware update process. # For ATWINC1500-MR210PA/B #wlan.fw_update("/winc_19_4_4.bin") # For ATWINC1500-MR210PB only. wlan.fw_update("/winc_19_5_2.bin")
import network # Init wlan module in Download mode. wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE) # For ATWINC1500-MR210PB only. wlan.fw_update("/winc_19_6_1.bin")
Update WiFi firmware update script.
Update WiFi firmware update script.
Python
mit
openmv/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv
python
## Code Before: import network # Init wlan module in Download mode. wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE) # Start the firmware update process. # For ATWINC1500-MR210PA/B #wlan.fw_update("/winc_19_4_4.bin") # For ATWINC1500-MR210PB only. wlan.fw_update("/winc_19_5_2.bin") ## Instruction: Update WiFi firmware update script. ## Code After: import network # Init wlan module in Download mode. wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE) # For ATWINC1500-MR210PB only. wlan.fw_update("/winc_19_6_1.bin")
... # Init wlan module in Download mode. wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE) # For ATWINC1500-MR210PB only. wlan.fw_update("/winc_19_6_1.bin") ...
626789d555ff8bfbcb1ab391a534493f3b26069e
sky/services/vsync/src/org/domokit/vsync/VSyncProviderImpl.java
sky/services/vsync/src/org/domokit/vsync/VSyncProviderImpl.java
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.domokit.vsync; import android.view.Choreographer; import org.chromium.mojo.system.MessagePipeHandle; import org.chromium.mojo.system.MojoException; import org.chromium.mojom.vsync.VSyncProvider; /** * Android implementation of VSyncProvider. */ public class VSyncProviderImpl implements VSyncProvider, Choreographer.FrameCallback { private Choreographer mChoreographer; private AwaitVSyncResponse mCallback; private MessagePipeHandle mPipe; public VSyncProviderImpl(MessagePipeHandle pipe) { mPipe = pipe; mChoreographer = Choreographer.getInstance(); } @Override public void close() {} @Override public void onConnectionError(MojoException e) {} @Override public void awaitVSync(final AwaitVSyncResponse callback) { if (mCallback != null) { mPipe.close(); return; } mCallback = callback; mChoreographer.postFrameCallback(this); } @Override public void doFrame(long frameTimeNanos) { mCallback.call(frameTimeNanos / 1000); mCallback = null; } }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.domokit.vsync; import android.view.Choreographer; import org.chromium.mojo.system.MessagePipeHandle; import org.chromium.mojo.system.MojoException; import org.chromium.mojom.vsync.VSyncProvider; import java.util.ArrayList; /** * Android implementation of VSyncProvider. */ public class VSyncProviderImpl implements VSyncProvider, Choreographer.FrameCallback { private Choreographer mChoreographer; private ArrayList<AwaitVSyncResponse> mCallbacks = new ArrayList<AwaitVSyncResponse>(); private MessagePipeHandle mPipe; public VSyncProviderImpl(MessagePipeHandle pipe) { mPipe = pipe; mChoreographer = Choreographer.getInstance(); } @Override public void close() {} @Override public void onConnectionError(MojoException e) {} @Override public void awaitVSync(final AwaitVSyncResponse callback) { mCallbacks.add(callback); if (mCallbacks.size() == 1) { mChoreographer.postFrameCallback(this); } } @Override public void doFrame(long frameTimeNanos) { long frameTimeMicros = frameTimeNanos / 1000; for (AwaitVSyncResponse callback : mCallbacks) { callback.call(frameTimeMicros); } mCallbacks.clear(); } }
Allow multiple callbacks in the VSyncProvider
Allow multiple callbacks in the VSyncProvider It's possible for multiple calls to AwaitVSync to be pending while reloading an application. The Animator may get an AwaitVSync, a Reset, and then another AwaitVSync before the first vsync completes. If this happens, invoke both callbacks. If we close the pipe, then the Animator will no longer make progress.
Java
bsd-3-clause
mpcomplete/flutter_engine,aam/engine,devoncarew/engine,devoncarew/engine,mikejurka/engine,devoncarew/sky_engine,chinmaygarde/flutter_engine,jamesr/flutter_engine,tvolkert/engine,jason-simmons/sky_engine,Hixie/sky_engine,abarth/sky_engine,aam/engine,devoncarew/sky_engine,abarth/sky_engine,jason-simmons/flutter_engine,flutter/engine,jamesr/sky_engine,jason-simmons/flutter_engine,abarth/sky_engine,jason-simmons/sky_engine,devoncarew/engine,flutter/engine,jamesr/sky_engine,flutter/engine,devoncarew/engine,Hixie/sky_engine,mikejurka/engine,aam/engine,cdotstout/sky_engine,jamesr/flutter_engine,krisgiesing/sky_engine,mpcomplete/flutter_engine,flutter/engine,jason-simmons/flutter_engine,jamesr/sky_engine,tvolkert/engine,krisgiesing/sky_engine,tvolkert/engine,jason-simmons/flutter_engine,krisgiesing/sky_engine,jamesr/sky_engine,chinmaygarde/flutter_engine,aam/engine,tvolkert/engine,aam/engine,flutter/engine,mpcomplete/engine,devoncarew/engine,jason-simmons/sky_engine,mikejurka/engine,cdotstout/sky_engine,cdotstout/sky_engine,jamesr/sky_engine,rmacnak-google/engine,mpcomplete/flutter_engine,jamesr/flutter_engine,tvolkert/engine,Hixie/sky_engine,chinmaygarde/sky_engine,rmacnak-google/engine,mikejurka/engine,jamesr/flutter_engine,abarth/sky_engine,lyceel/engine,rmacnak-google/engine,aam/engine,jamesr/sky_engine,krisgiesing/sky_engine,chinmaygarde/sky_engine,chinmaygarde/sky_engine,tvolkert/engine,mpcomplete/flutter_engine,mpcomplete/engine,jamesr/flutter_engine,mikejurka/engine,jason-simmons/flutter_engine,Hixie/sky_engine,chinmaygarde/sky_engine,rmacnak-google/engine,flutter/engine,chinmaygarde/flutter_engine,krisgiesing/sky_engine,aam/engine,Hixie/sky_engine,jamesr/sky_engine,jason-simmons/sky_engine,Hixie/sky_engine,mpcomplete/engine,jamesr/flutter_engine,abarth/sky_engine,mpcomplete/engine,Hixie/sky_engine,lyceel/engine,chinmaygarde/flutter_engine,jason-simmons/flutter_engine,jason-simmons/sky_engine,chinmaygarde/sky_engine,devoncarew/engine,mpcomplete/engine,jamesr/flutter_engine,mpcomplete/flutter_engine,mpcomplete/engine,tvolkert/engine,mpcomplete/engine,abarth/sky_engine,cdotstout/sky_engine,jason-simmons/sky_engine,devoncarew/sky_engine,mpcomplete/flutter_engine,devoncarew/sky_engine,krisgiesing/sky_engine,lyceel/engine,rmacnak-google/engine,jason-simmons/flutter_engine,cdotstout/sky_engine,chinmaygarde/flutter_engine,mikejurka/engine,aam/engine,jason-simmons/flutter_engine,devoncarew/sky_engine,jason-simmons/sky_engine,chinmaygarde/sky_engine,krisgiesing/sky_engine,chinmaygarde/flutter_engine,lyceel/engine,mikejurka/engine,lyceel/engine,devoncarew/sky_engine,devoncarew/engine,flutter/engine,mikejurka/engine,jamesr/flutter_engine,lyceel/engine,abarth/sky_engine,lyceel/engine,rmacnak-google/engine,Hixie/sky_engine,flutter/engine,mikejurka/engine,chinmaygarde/flutter_engine,rmacnak-google/engine,cdotstout/sky_engine,devoncarew/sky_engine,mpcomplete/engine,jamesr/flutter_engine,chinmaygarde/sky_engine,cdotstout/sky_engine
java
## Code Before: // Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.domokit.vsync; import android.view.Choreographer; import org.chromium.mojo.system.MessagePipeHandle; import org.chromium.mojo.system.MojoException; import org.chromium.mojom.vsync.VSyncProvider; /** * Android implementation of VSyncProvider. */ public class VSyncProviderImpl implements VSyncProvider, Choreographer.FrameCallback { private Choreographer mChoreographer; private AwaitVSyncResponse mCallback; private MessagePipeHandle mPipe; public VSyncProviderImpl(MessagePipeHandle pipe) { mPipe = pipe; mChoreographer = Choreographer.getInstance(); } @Override public void close() {} @Override public void onConnectionError(MojoException e) {} @Override public void awaitVSync(final AwaitVSyncResponse callback) { if (mCallback != null) { mPipe.close(); return; } mCallback = callback; mChoreographer.postFrameCallback(this); } @Override public void doFrame(long frameTimeNanos) { mCallback.call(frameTimeNanos / 1000); mCallback = null; } } ## Instruction: Allow multiple callbacks in the VSyncProvider It's possible for multiple calls to AwaitVSync to be pending while reloading an application. The Animator may get an AwaitVSync, a Reset, and then another AwaitVSync before the first vsync completes. If this happens, invoke both callbacks. If we close the pipe, then the Animator will no longer make progress. ## Code After: // Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.domokit.vsync; import android.view.Choreographer; import org.chromium.mojo.system.MessagePipeHandle; import org.chromium.mojo.system.MojoException; import org.chromium.mojom.vsync.VSyncProvider; import java.util.ArrayList; /** * Android implementation of VSyncProvider. */ public class VSyncProviderImpl implements VSyncProvider, Choreographer.FrameCallback { private Choreographer mChoreographer; private ArrayList<AwaitVSyncResponse> mCallbacks = new ArrayList<AwaitVSyncResponse>(); private MessagePipeHandle mPipe; public VSyncProviderImpl(MessagePipeHandle pipe) { mPipe = pipe; mChoreographer = Choreographer.getInstance(); } @Override public void close() {} @Override public void onConnectionError(MojoException e) {} @Override public void awaitVSync(final AwaitVSyncResponse callback) { mCallbacks.add(callback); if (mCallbacks.size() == 1) { mChoreographer.postFrameCallback(this); } } @Override public void doFrame(long frameTimeNanos) { long frameTimeMicros = frameTimeNanos / 1000; for (AwaitVSyncResponse callback : mCallbacks) { callback.call(frameTimeMicros); } mCallbacks.clear(); } }
... import org.chromium.mojo.system.MojoException; import org.chromium.mojom.vsync.VSyncProvider; import java.util.ArrayList; /** * Android implementation of VSyncProvider. */ public class VSyncProviderImpl implements VSyncProvider, Choreographer.FrameCallback { private Choreographer mChoreographer; private ArrayList<AwaitVSyncResponse> mCallbacks = new ArrayList<AwaitVSyncResponse>(); private MessagePipeHandle mPipe; public VSyncProviderImpl(MessagePipeHandle pipe) { ... @Override public void awaitVSync(final AwaitVSyncResponse callback) { mCallbacks.add(callback); if (mCallbacks.size() == 1) { mChoreographer.postFrameCallback(this); } } @Override public void doFrame(long frameTimeNanos) { long frameTimeMicros = frameTimeNanos / 1000; for (AwaitVSyncResponse callback : mCallbacks) { callback.call(frameTimeMicros); } mCallbacks.clear(); } } ...
b9379e3c8667d062ec6511ad07f2525ea0b2f5ef
tests/test_statepoint_sourcesep/test_statepoint_sourcesep.py
tests/test_statepoint_sourcesep/test_statepoint_sourcesep.py
import sys sys.path.insert(0, '..') from testing_harness import * class SourcepointTestHarness(TestHarness): def _test_output_created(self): """Make sure statepoint.* and source* have been created.""" TestHarness._test_output_created(self) source = glob.glob(os.path.join(os.getcwd(), 'source.*')) assert len(source) == 1, 'Either multiple or no source files ' \ 'exist.' assert source[0].endswith('h5'), \ 'Source file is not a HDF5 file.' if __name__ == '__main__': harness = SourcepointTestHarness('statepoint.10.*') harness.main()
import sys sys.path.insert(0, '..') from testing_harness import * class SourcepointTestHarness(TestHarness): def _test_output_created(self): """Make sure statepoint.* and source* have been created.""" TestHarness._test_output_created(self) source = glob.glob(os.path.join(os.getcwd(), 'source.*')) assert len(source) == 1, 'Either multiple or no source files ' \ 'exist.' assert source[0].endswith('h5'), \ 'Source file is not a HDF5 file.' def _cleanup(self): TestHarness._cleanup(self) output = glob.glob(os.path.join(os.getcwd(), 'source.*')) for f in output: if os.path.exists(f): os.remove(f) if __name__ == '__main__': harness = SourcepointTestHarness('statepoint.10.*') harness.main()
Make test clean up source file
Make test clean up source file
Python
mit
amandalund/openmc,mit-crpg/openmc,shikhar413/openmc,walshjon/openmc,bhermanmit/openmc,mjlong/openmc,paulromano/openmc,samuelshaner/openmc,smharper/openmc,liangjg/openmc,paulromano/openmc,mit-crpg/openmc,shikhar413/openmc,wbinventor/openmc,walshjon/openmc,wbinventor/openmc,wbinventor/openmc,wbinventor/openmc,kellyrowland/openmc,bhermanmit/openmc,liangjg/openmc,liangjg/openmc,shikhar413/openmc,shikhar413/openmc,mit-crpg/openmc,smharper/openmc,amandalund/openmc,johnnyliu27/openmc,smharper/openmc,paulromano/openmc,walshjon/openmc,smharper/openmc,walshjon/openmc,johnnyliu27/openmc,liangjg/openmc,amandalund/openmc,amandalund/openmc,paulromano/openmc,samuelshaner/openmc,kellyrowland/openmc,samuelshaner/openmc,mjlong/openmc,johnnyliu27/openmc,johnnyliu27/openmc,mit-crpg/openmc,samuelshaner/openmc
python
## Code Before: import sys sys.path.insert(0, '..') from testing_harness import * class SourcepointTestHarness(TestHarness): def _test_output_created(self): """Make sure statepoint.* and source* have been created.""" TestHarness._test_output_created(self) source = glob.glob(os.path.join(os.getcwd(), 'source.*')) assert len(source) == 1, 'Either multiple or no source files ' \ 'exist.' assert source[0].endswith('h5'), \ 'Source file is not a HDF5 file.' if __name__ == '__main__': harness = SourcepointTestHarness('statepoint.10.*') harness.main() ## Instruction: Make test cleanup source file ## Code After: import sys sys.path.insert(0, '..') from testing_harness import * class SourcepointTestHarness(TestHarness): def _test_output_created(self): """Make sure statepoint.* and source* have been created.""" TestHarness._test_output_created(self) source = glob.glob(os.path.join(os.getcwd(), 'source.*')) assert len(source) == 1, 'Either multiple or no source files ' \ 'exist.' assert source[0].endswith('h5'), \ 'Source file is not a HDF5 file.' def _cleanup(self): TestHarness._cleanup(self) output = glob.glob(os.path.join(os.getcwd(), 'source.*')) for f in output: if os.path.exists(f): os.remove(f) if __name__ == '__main__': harness = SourcepointTestHarness('statepoint.10.*') harness.main()
# ... existing code ... assert source[0].endswith('h5'), \ 'Source file is not a HDF5 file.' def _cleanup(self): TestHarness._cleanup(self) output = glob.glob(os.path.join(os.getcwd(), 'source.*')) for f in output: if os.path.exists(f): os.remove(f) if __name__ == '__main__': harness = SourcepointTestHarness('statepoint.10.*') # ... rest of the code ...
157c6cfad203ac31e72b00e08da5e3ae50286215
src/main/java/in/twizmwaz/cardinal/module/modules/filter/type/VoidFilter.java
src/main/java/in/twizmwaz/cardinal/module/modules/filter/type/VoidFilter.java
package in.twizmwaz.cardinal.module.modules.filter.type; import in.twizmwaz.cardinal.GameHandler; import in.twizmwaz.cardinal.module.modules.filter.FilterModule; import in.twizmwaz.cardinal.module.modules.filter.FilterState; import in.twizmwaz.cardinal.module.modules.filter.parsers.GenericFilterParser; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import static in.twizmwaz.cardinal.module.modules.filter.FilterState.*; public class VoidFilter extends FilterModule { public VoidFilter(final GenericFilterParser parser) { super(parser.getName()); } @Override public FilterState evaluate(final Object object) { if (object instanceof Block) { Block check = new Location(GameHandler.getGameHandler().getMatchWorld(), ((Block) object).getX(), 0, ((Block) object).getZ()).getBlock(); return check.getType() == Material.AIR ? DENY : ALLOW; } else return ABSTAIN; } }
package in.twizmwaz.cardinal.module.modules.filter.type; import in.twizmwaz.cardinal.GameHandler; import in.twizmwaz.cardinal.module.modules.filter.FilterModule; import in.twizmwaz.cardinal.module.modules.filter.FilterState; import in.twizmwaz.cardinal.module.modules.filter.parsers.GenericFilterParser; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import static in.twizmwaz.cardinal.module.modules.filter.FilterState.*; public class VoidFilter extends FilterModule { public VoidFilter(final GenericFilterParser parser) { super(parser.getName()); } @Override public FilterState evaluate(final Object object) { if (object instanceof Block) { Block check = new Location(GameHandler.getGameHandler().getMatchWorld(), ((Block) object).getX(), 0, ((Block) object).getZ()).getBlock(); return check.getType() == Material.AIR ? ALLOW : DENY; } else return ABSTAIN; } }
Switch void filters to make them work correctly
Switch void filters to make them work correctly
Java
mit
TheMolkaPL/CardinalPGM,dentmaged/CardinalPGM,Electroid/ExperimentalPGM,TheMolkaPL/CardinalPGM,dentmaged/Cardinal-Dev,SungMatt/CardinalPGM,dentmaged/Cardinal-Dev,Alan736/NotCardinalPGM,Electroid/ExperimentalPGM,angelitorb99/CardinalPGM,Pablete1234/CardinalPGM,dentmaged/Cardinal-Plus,twizmwazin/CardinalPGM,CaptainElliott/CardinalPGM,Aaron1011/CardinalPGM,Alan736/NotCardinalPGM,dentmaged/CardinalPGM,dentmaged/Cardinal-Plus,iPGz/CardinalPGM
java
## Code Before: package in.twizmwaz.cardinal.module.modules.filter.type; import in.twizmwaz.cardinal.GameHandler; import in.twizmwaz.cardinal.module.modules.filter.FilterModule; import in.twizmwaz.cardinal.module.modules.filter.FilterState; import in.twizmwaz.cardinal.module.modules.filter.parsers.GenericFilterParser; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import static in.twizmwaz.cardinal.module.modules.filter.FilterState.*; public class VoidFilter extends FilterModule { public VoidFilter(final GenericFilterParser parser) { super(parser.getName()); } @Override public FilterState evaluate(final Object object) { if (object instanceof Block) { Block check = new Location(GameHandler.getGameHandler().getMatchWorld(), ((Block) object).getX(), 0, ((Block) object).getZ()).getBlock(); return check.getType() == Material.AIR ? DENY : ALLOW; } else return ABSTAIN; } } ## Instruction: Switch void filters to make them work correcty ## Code After: package in.twizmwaz.cardinal.module.modules.filter.type; import in.twizmwaz.cardinal.GameHandler; import in.twizmwaz.cardinal.module.modules.filter.FilterModule; import in.twizmwaz.cardinal.module.modules.filter.FilterState; import in.twizmwaz.cardinal.module.modules.filter.parsers.GenericFilterParser; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import static in.twizmwaz.cardinal.module.modules.filter.FilterState.*; public class VoidFilter extends FilterModule { public VoidFilter(final GenericFilterParser parser) { super(parser.getName()); } @Override public FilterState evaluate(final Object object) { if (object instanceof Block) { Block check = new Location(GameHandler.getGameHandler().getMatchWorld(), ((Block) object).getX(), 0, ((Block) object).getZ()).getBlock(); return check.getType() == Material.AIR ? ALLOW : DENY; } else return ABSTAIN; } }
# ... existing code ... if (object instanceof Block) { Block check = new Location(GameHandler.getGameHandler().getMatchWorld(), ((Block) object).getX(), 0, ((Block) object).getZ()).getBlock(); return check.getType() == Material.AIR ? ALLOW : DENY; } else return ABSTAIN; } } # ... rest of the code ...
bb226f2b27f91a39bdc1db2db9c6e23664d3dd94
src/upgrades/SpeedShotUpgrade.java
src/upgrades/SpeedShotUpgrade.java
package edu.stuy.starlorn.upgrades; public class SpeedShotUpgrade extends GunUpgrade { public SpeedShotUpgrade() { super(); _name = "Speed Shot"; _description = "Shot Speed x 2!"; } @Override public double getShotSpeed(double shotspeed) { return shotspeed * 2; } @Override public Upgrade clone() { return new SpeedShotUpgrade(); } }
package edu.stuy.starlorn.upgrades; public class SpeedShotUpgrade extends GunUpgrade { public SpeedShotUpgrade() { super(); _name = "Speed Shot"; _description = "Shot Speed x 2!"; } @Override public double getShotSpeed(double shotspeed) { return shotspeed * 2; } @Override public double getCooldown(double cooldown) { return cooldown * .9; } @Override public Upgrade clone() { return new SpeedShotUpgrade(); } }
Add a slight cooldown buff to speedshot to make it less crappy
Add a slight cooldown buff to speedshot to make it less crappy
Java
mit
Hypersonic/Starlorn
java
## Code Before: package edu.stuy.starlorn.upgrades; public class SpeedShotUpgrade extends GunUpgrade { public SpeedShotUpgrade() { super(); _name = "Speed Shot"; _description = "Shot Speed x 2!"; } @Override public double getShotSpeed(double shotspeed) { return shotspeed * 2; } @Override public Upgrade clone() { return new SpeedShotUpgrade(); } } ## Instruction: Add a slight cooldown buff to speedshot to make it less crappy ## Code After: package edu.stuy.starlorn.upgrades; public class SpeedShotUpgrade extends GunUpgrade { public SpeedShotUpgrade() { super(); _name = "Speed Shot"; _description = "Shot Speed x 2!"; } @Override public double getShotSpeed(double shotspeed) { return shotspeed * 2; } @Override public double getCooldown(double cooldown) { return cooldown * .9; } @Override public Upgrade clone() { return new SpeedShotUpgrade(); } }
... } @Override public double getCooldown(double cooldown) { return cooldown * .9; } @Override public Upgrade clone() { return new SpeedShotUpgrade(); } ...
9d0b1990b979de19939cc37cbefb86e1a0cd4e0f
test/perf/perf.py
test/perf/perf.py
import numpy as np import pylab as pl import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t
import numpy as np import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t
Remove pylab from import statements
Remove pylab from import statements
Python
mit
wkearn/Kalman.jl,wkearn/Kalman.jl
python
## Code Before: import numpy as np import pylab as pl import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t ## Instruction: Remove pylab from import statements ## Code After: import numpy as np import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t
# ... existing code ... import numpy as np import sys import timeit from pykalman import KalmanFilter # ... rest of the code ...
35bc38ac4592800a2c3d13b001a0b66679c8f0b7
include/api/ofp_epoll.h
include/api/ofp_epoll.h
/* Copyright (c) 2016, Nokia * Copyright (c) 2016, ENEA Software AB * All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef __OFP_EPOLL_H__ #define __OFP_EPOLL_H__ #include <stdint.h> typedef union ofp_epoll_data { void *ptr; int fd; uint32_t u32; uint64_t u64; } ofp_epoll_data_t; struct ofp_epoll_event { uint32_t events; ofp_epoll_data_t data; }; enum OFP_EPOLL_EVENTS { OFP_EPOLLIN = 0x001, #define OFP_EPOLLIN OFP_EPOLLIN }; #define OFP_EPOLL_CTL_ADD 1 #define OFP_EPOLL_CTL_DEL 2 #define OFP_EPOLL_CTL_MOD 3 int ofp_epoll_create(int size); int ofp_epoll_ctl(int epfd, int op, int fd, struct ofp_epoll_event *event); int ofp_epoll_wait(int epfd, struct ofp_epoll_event *events, int maxevents, int timeout); #endif
/* Copyright (c) 2016, Nokia * Copyright (c) 2016, ENEA Software AB * All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef __OFP_EPOLL_H__ #define __OFP_EPOLL_H__ #include <stdint.h> #if __GNUC__ >= 4 #pragma GCC visibility push(default) #endif typedef union ofp_epoll_data { void *ptr; int fd; uint32_t u32; uint64_t u64; } ofp_epoll_data_t; struct ofp_epoll_event { uint32_t events; ofp_epoll_data_t data; }; enum OFP_EPOLL_EVENTS { OFP_EPOLLIN = 0x001, #define OFP_EPOLLIN OFP_EPOLLIN }; #define OFP_EPOLL_CTL_ADD 1 #define OFP_EPOLL_CTL_DEL 2 #define OFP_EPOLL_CTL_MOD 3 int ofp_epoll_create(int size); int ofp_epoll_ctl(int epfd, int op, int fd, struct ofp_epoll_event *event); int ofp_epoll_wait(int epfd, struct ofp_epoll_event *events, int maxevents, int timeout); #if __GNUC__ >= 4 #pragma GCC visibility pop #endif #endif
Add visibility to epoll headers
Add visibility to epoll headers The odp_epoll_* symbols were not visible in the final library built with GCC. Signed-off-by: Oriol Arcas <[email protected]> Reviewed-by: Sorin Vultureanu <[email protected]>
C
bsd-3-clause
TolikH/ofp,TolikH/ofp,OpenFastPath/ofp,OpenFastPath/ofp,OpenFastPath/ofp,TolikH/ofp,OpenFastPath/ofp
c
## Code Before: /* Copyright (c) 2016, Nokia * Copyright (c) 2016, ENEA Software AB * All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef __OFP_EPOLL_H__ #define __OFP_EPOLL_H__ #include <stdint.h> typedef union ofp_epoll_data { void *ptr; int fd; uint32_t u32; uint64_t u64; } ofp_epoll_data_t; struct ofp_epoll_event { uint32_t events; ofp_epoll_data_t data; }; enum OFP_EPOLL_EVENTS { OFP_EPOLLIN = 0x001, #define OFP_EPOLLIN OFP_EPOLLIN }; #define OFP_EPOLL_CTL_ADD 1 #define OFP_EPOLL_CTL_DEL 2 #define OFP_EPOLL_CTL_MOD 3 int ofp_epoll_create(int size); int ofp_epoll_ctl(int epfd, int op, int fd, struct ofp_epoll_event *event); int ofp_epoll_wait(int epfd, struct ofp_epoll_event *events, int maxevents, int timeout); #endif ## Instruction: Add visibility to epoll headers The odp_epoll_* symbols were not visible in the final library built with GCC. Signed-off-by: Oriol Arcas <[email protected]> Reviewed-by: Sorin Vultureanu <[email protected]> ## Code After: /* Copyright (c) 2016, Nokia * Copyright (c) 2016, ENEA Software AB * All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef __OFP_EPOLL_H__ #define __OFP_EPOLL_H__ #include <stdint.h> #if __GNUC__ >= 4 #pragma GCC visibility push(default) #endif typedef union ofp_epoll_data { void *ptr; int fd; uint32_t u32; uint64_t u64; } ofp_epoll_data_t; struct ofp_epoll_event { uint32_t events; ofp_epoll_data_t data; }; enum OFP_EPOLL_EVENTS { OFP_EPOLLIN = 0x001, #define OFP_EPOLLIN OFP_EPOLLIN }; #define OFP_EPOLL_CTL_ADD 1 #define OFP_EPOLL_CTL_DEL 2 #define OFP_EPOLL_CTL_MOD 3 int ofp_epoll_create(int size); int ofp_epoll_ctl(int epfd, int op, int fd, struct ofp_epoll_event *event); int ofp_epoll_wait(int epfd, struct ofp_epoll_event *events, int maxevents, int timeout); #if __GNUC__ >= 4 #pragma GCC visibility pop #endif #endif
# ... existing code ... #define __OFP_EPOLL_H__ #include <stdint.h> #if __GNUC__ >= 4 #pragma GCC visibility push(default) #endif typedef union ofp_epoll_data { void *ptr; # ... modified code ... int ofp_epoll_wait(int epfd, struct ofp_epoll_event *events, int maxevents, int timeout); #if __GNUC__ >= 4 #pragma GCC visibility pop #endif #endif # ... rest of the code ...
630ca7355843a0f8c914c7af734547908e9045f4
src/main/java/au/com/auspost/api/postcode/search/model/Locality.java
src/main/java/au/com/auspost/api/postcode/search/model/Locality.java
package au.com.auspost.api.postcode.search.model; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; /** * A single post code locality */ @XmlAccessorType(XmlAccessType.FIELD) public class Locality { @XmlElement private String category; @XmlElement private Integer id; @XmlElement private String location; @XmlElement private Integer postcode; @XmlElement private AustralianState state; @XmlElement private Float latitude; @XmlElement private Float longitude; public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getLocation() { return location; } public void setLocation(String location) { this.location = location; } public Integer getPostcode() { return postcode; } public void setPostcode(Integer postcode) { this.postcode = postcode; } public AustralianState getState() { return state; } public void setState(AustralianState state) { this.state = state; } public Float getLatitude() { return latitude; } public void setLatitude(Float latitude) { this.latitude = latitude; } public Float getLongitude() { return longitude; } public void setLongitude(Float longitude) { this.longitude = longitude; } }
package au.com.auspost.api.postcode.search.model; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; /** * A single post code locality */ @XmlAccessorType(XmlAccessType.FIELD) public class Locality { @XmlElement private String category; @XmlElement private Integer id; @XmlElement private String location; @XmlElement private String postcode; @XmlElement private AustralianState state; @XmlElement private Float latitude; @XmlElement private Float longitude; public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getLocation() { return location; } public void setLocation(String location) { this.location = location; } public String getPostcode() { return postcode; } public void setPostcode(String postcode) { this.postcode = postcode; } public AustralianState getState() { return state; } public void setState(AustralianState state) { this.state = state; } public Float getLatitude() { return latitude; } public void setLatitude(Float latitude) { this.latitude = latitude; } public Float getLongitude() { return longitude; } public void setLongitude(Float longitude) { this.longitude = longitude; } }
Change postcode from Integer to String.
Change postcode from Integer to String.
Java
mit
ashri/java-auspost-api
java
## Code Before: package au.com.auspost.api.postcode.search.model; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; /** * A single post code locality */ @XmlAccessorType(XmlAccessType.FIELD) public class Locality { @XmlElement private String category; @XmlElement private Integer id; @XmlElement private String location; @XmlElement private Integer postcode; @XmlElement private AustralianState state; @XmlElement private Float latitude; @XmlElement private Float longitude; public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getLocation() { return location; } public void setLocation(String location) { this.location = location; } public Integer getPostcode() { return postcode; } public void setPostcode(Integer postcode) { this.postcode = postcode; } public AustralianState getState() { return state; } public void setState(AustralianState state) { this.state = state; } public Float getLatitude() { return latitude; } public void setLatitude(Float latitude) { this.latitude = latitude; } public Float getLongitude() { return longitude; } public void setLongitude(Float longitude) { this.longitude = longitude; } } ## Instruction: Change postcode from Integer to String. ## Code After: package au.com.auspost.api.postcode.search.model; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; /** * A single post code locality */ @XmlAccessorType(XmlAccessType.FIELD) public class Locality { @XmlElement private String category; @XmlElement private Integer id; @XmlElement private String location; @XmlElement private String postcode; @XmlElement private AustralianState state; @XmlElement private Float latitude; @XmlElement private Float longitude; public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getLocation() { return location; } public void setLocation(String location) { this.location = location; } public String getPostcode() { return postcode; } public void setPostcode(String postcode) { this.postcode = postcode; } public AustralianState getState() { return state; } public void setState(AustralianState state) { this.state = state; } public Float getLatitude() { return latitude; } public void setLatitude(Float latitude) { this.latitude = latitude; } public Float getLongitude() { return longitude; } public void setLongitude(Float longitude) { this.longitude = longitude; } }
# ... existing code ... private String location; @XmlElement private String postcode; @XmlElement private AustralianState state; # ... modified code ... this.location = location; } public String getPostcode() { return postcode; } public void setPostcode(String postcode) { this.postcode = postcode; } # ... rest of the code ...
b5afdd604831f985427880537d37eb7a35addaa1
tests/functional/test_python_option.py
tests/functional/test_python_option.py
import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fsdecode(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [] script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == []
import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") before = json.loads(result.stdout) # Ideally we would assert that before==[], but there's a problem in CI # that means this isn't true. See https://github.com/pypa/pip/pull/11326 # for details. script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") installed = json.loads(result.stdout) assert {"name": "simplewheel", "version": "1.0"} in installed script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before
Fix test to cater for packages leaked into venv
Fix test to cater for packages leaked into venv
Python
mit
pfmoore/pip,pypa/pip,sbidoul/pip,pfmoore/pip,pradyunsg/pip,sbidoul/pip,pypa/pip,pradyunsg/pip
python
## Code Before: import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fsdecode(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [] script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [] ## Instruction: Fix test to cater for packages leaked into venv ## Code After: import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") before = json.loads(result.stdout) # Ideally we would assert that before==[], but there's a problem in CI # that means this isn't true. See https://github.com/pypa/pip/pull/11326 # for details. script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") installed = json.loads(result.stdout) assert {"name": "simplewheel", "version": "1.0"} in installed script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before
// ... existing code ... tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") before = json.loads(result.stdout) # Ideally we would assert that before==[], but there's a problem in CI # that means this isn't true. See https://github.com/pypa/pip/pull/11326 # for details. script.pip( "--python", env_path, // ... modified code ... "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") installed = json.loads(result.stdout) assert {"name": "simplewheel", "version": "1.0"} in installed script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before // ... rest of the code ...
447a5d8024b547e4b008268f128456ca3b764b6b
src/test.c
src/test.c
int main() { int a = 0; while(1) { printf("%d\t",a); printf("a = 0x%x\n",a); int temp; scanf("%d",&temp); if(temp == 0) { continue; } else { a = temp; } printf("%d\t",a); printf("a changed 0x%x\n",a); } return 0; }
int main() { int a = 0; while(1) { printf("%d\t",a); printf("a = 0x%x\n",a); int temp; scanf("%d",&temp); if(temp == 0) { continue; } else { a = temp; } printf("%d\t",a); printf("a changed 0x%x\n",a); } return 0; }
Test file tab to space
Test file tab to space
C
bsd-3-clause
allencch/med,allencch/med,allencch/med
c
## Code Before: int main() { int a = 0; while(1) { printf("%d\t",a); printf("a = 0x%x\n",a); int temp; scanf("%d",&temp); if(temp == 0) { continue; } else { a = temp; } printf("%d\t",a); printf("a changed 0x%x\n",a); } return 0; } ## Instruction: Test file tab to space ## Code After: int main() { int a = 0; while(1) { printf("%d\t",a); printf("a = 0x%x\n",a); int temp; scanf("%d",&temp); if(temp == 0) { continue; } else { a = temp; } printf("%d\t",a); printf("a changed 0x%x\n",a); } return 0; }
// ... existing code ... int main() { int a = 0; while(1) { printf("%d\t",a); printf("a = 0x%x\n",a); int temp; scanf("%d",&temp); if(temp == 0) { continue; } else { a = temp; } printf("%d\t",a); printf("a changed 0x%x\n",a); } return 0; } // ... rest of the code ...
6fe48fc7499327d27f69204b7f8ec927fc975177
python/lexPythonMQ.py
python/lexPythonMQ.py
import tokenize; import zmq; context = zmq.Context() socket = context.socket(zmq.REP) socket.bind("tcp://lo:32132") while True: # Wait for next request from client message = socket.recv()
import re, sys, tokenize, zmq; from StringIO import StringIO def err(msg): sys.err.write(str(msg) + '\n') class LexPyMQ(object): def __init__(self): self.zctx = zmq.Context() self.socket = self.zctx.socket(zmq.REP) def run(self): self.socket.bind("tcp://lo:32132") while True: msg = self.socket.recv_json(0) # there are definitely new lines in the code if not msg.get('python'): err('received non-python code') code = msg.get('body', '') self.socket.send_json(tokenize.generate_tokens(StringIO(code))) if __name__ == '__main__': LexPyMQ().run()
Implement python lexer ZMQ service.
Implement python lexer ZMQ service.
Python
agpl-3.0
orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,abramhindle/UnnaturalCodeFork,orezpraw/estimate-charm,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork
python
## Code Before: import tokenize; import zmq; context = zmq.Context() socket = context.socket(zmq.REP) socket.bind("tcp://lo:32132") while True: # Wait for next request from client message = socket.recv() ## Instruction: Implement python lexer ZMQ service. ## Code After: import re, sys, tokenize, zmq; from StringIO import StringIO def err(msg): sys.err.write(str(msg) + '\n') class LexPyMQ(object): def __init__(self): self.zctx = zmq.Context() self.socket = self.zctx.socket(zmq.REP) def run(self): self.socket.bind("tcp://lo:32132") while True: msg = self.socket.recv_json(0) # there are definitely new lines in the code if not msg.get('python'): err('received non-python code') code = msg.get('body', '') self.socket.send_json(tokenize.generate_tokens(StringIO(code))) if __name__ == '__main__': LexPyMQ().run()
# ... existing code ... import re, sys, tokenize, zmq; from StringIO import StringIO def err(msg): sys.err.write(str(msg) + '\n') class LexPyMQ(object): def __init__(self): self.zctx = zmq.Context() self.socket = self.zctx.socket(zmq.REP) def run(self): self.socket.bind("tcp://lo:32132") while True: msg = self.socket.recv_json(0) # there are definitely new lines in the code if not msg.get('python'): err('received non-python code') code = msg.get('body', '') self.socket.send_json(tokenize.generate_tokens(StringIO(code))) if __name__ == '__main__': LexPyMQ().run() # ... rest of the code ...
6c8122be60b25bbe9ba4ff8a714370e801e6ae70
cufflinks/offline.py
cufflinks/offline.py
import plotly.offline as py_offline ### Offline Mode def go_offline(connected=False): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
import plotly.offline as py_offline ### Offline Mode def run_from_ipython(): try: __IPYTHON__ return True except NameError: return False def go_offline(connected=False): if run_from_ipython(): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
Call init_notebook_mode only if inside IPython
Call init_notebook_mode only if inside IPython
Python
mit
santosjorge/cufflinks
python
## Code Before: import plotly.offline as py_offline ### Offline Mode def go_offline(connected=False): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url) ## Instruction: Call init_notebook_mode only if inside IPython ## Code After: import plotly.offline as py_offline ### Offline Mode def run_from_ipython(): try: __IPYTHON__ return True except NameError: return False def go_offline(connected=False): if run_from_ipython(): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False def is_offline(): return py_offline.__PLOTLY_OFFLINE_INITIALIZED def upgrade(url=None): from .auth import get_config_file if not url: if 'http' not in get_config_file()['offline_url']: raise Exception("No default offline URL set \n" "Please run cf.set_config_file(offline_url=xx) to set \n" "the default offline URL.") else: url=get_config_file()['offline_url'] py_offline.download_plotlyjs(url)
... ### Offline Mode def run_from_ipython(): try: __IPYTHON__ return True except NameError: return False def go_offline(connected=False): if run_from_ipython(): try: py_offline.init_notebook_mode(connected) except TypeError: #For older versions of plotly py_offline.init_notebook_mode() py_offline.__PLOTLY_OFFLINE_INITIALIZED=True def go_online(): py_offline.__PLOTLY_OFFLINE_INITIALIZED=False ...
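The detection idiom added in this record stands alone well: IPython injects an __IPYTHON__ builtin into the interpreter, so catching NameError distinguishes IPython/notebook sessions from plain Python. A minimal self-contained version follows; the demo under the __main__ guard is mine, not from cufflinks.

def run_from_ipython():
    try:
        __IPYTHON__  # noqa: F821 -- only defined when running under IPython
        return True
    except NameError:
        return False

if __name__ == "__main__":
    # Prints False under a regular interpreter, True inside an IPython/Jupyter session.
    print(run_from_ipython())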
e7aca443a1a0597bdb7c69406e1dbefb99df1624
src/main/java/ee/tuleva/onboarding/config/DigiDocConfiguration.java
src/main/java/ee/tuleva/onboarding/config/DigiDocConfiguration.java
package ee.tuleva.onboarding.config; import static org.digidoc4j.Configuration.Mode.PROD; import static org.digidoc4j.Configuration.Mode.TEST; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; @Configuration public class DigiDocConfiguration { @Bean @Profile("production") public org.digidoc4j.Configuration digiDocConfigProd() { return new org.digidoc4j.Configuration(PROD); } @Bean @ConditionalOnMissingBean(org.digidoc4j.Configuration.class) public org.digidoc4j.Configuration digiDocConfigDev() { // use PROD for testing signing return new org.digidoc4j.Configuration(TEST); } }
package ee.tuleva.onboarding.config; import static org.digidoc4j.Configuration.Mode.PROD; import static org.digidoc4j.Configuration.Mode.TEST; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; @Configuration public class DigiDocConfiguration { @Bean @Profile({"production", "staging"}) public org.digidoc4j.Configuration digiDocConfigProd() { return new org.digidoc4j.Configuration(PROD); } @Bean @ConditionalOnMissingBean(org.digidoc4j.Configuration.class) public org.digidoc4j.Configuration digiDocConfigDev() { // use PROD for testing signing return new org.digidoc4j.Configuration(TEST); } }
Add staging profile to digidoc
Add staging profile to digidoc
Java
mit
TulevaEE/onboarding-service,TulevaEE/onboarding-service,TulevaEE/onboarding-service,TulevaEE/onboarding-service
java
## Code Before: package ee.tuleva.onboarding.config; import static org.digidoc4j.Configuration.Mode.PROD; import static org.digidoc4j.Configuration.Mode.TEST; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; @Configuration public class DigiDocConfiguration { @Bean @Profile("production") public org.digidoc4j.Configuration digiDocConfigProd() { return new org.digidoc4j.Configuration(PROD); } @Bean @ConditionalOnMissingBean(org.digidoc4j.Configuration.class) public org.digidoc4j.Configuration digiDocConfigDev() { // use PROD for testing signing return new org.digidoc4j.Configuration(TEST); } } ## Instruction: Add staging profile to digidoc ## Code After: package ee.tuleva.onboarding.config; import static org.digidoc4j.Configuration.Mode.PROD; import static org.digidoc4j.Configuration.Mode.TEST; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; @Configuration public class DigiDocConfiguration { @Bean @Profile({"production", "staging"}) public org.digidoc4j.Configuration digiDocConfigProd() { return new org.digidoc4j.Configuration(PROD); } @Bean @ConditionalOnMissingBean(org.digidoc4j.Configuration.class) public org.digidoc4j.Configuration digiDocConfigDev() { // use PROD for testing signing return new org.digidoc4j.Configuration(TEST); } }
... public class DigiDocConfiguration { @Bean @Profile({"production", "staging"}) public org.digidoc4j.Configuration digiDocConfigProd() { return new org.digidoc4j.Configuration(PROD); } ...
905628048f5306cb5cc96e24eda40e86c6b44c62
src/dialogwindow.h
src/dialogwindow.h
class EditorGUI; class Structure; class DialogWindow : public nanogui::Window { public: DialogWindow(EditorGUI *screen, nanogui::Theme *theme); nanogui::Window *getWindow() { return this; } Structure *structure() { return current_structure; } void setStructure( Structure *s); void loadStructure( Structure *s); void clear(); private: EditorGUI *gui; Structure *current_structure; };
class EditorGUI; class Structure; class DialogWindow : public nanogui::Window { public: DialogWindow(EditorGUI *screen, nanogui::Theme *theme); nanogui::Window *getWindow() { return this; } Structure *structure() { return current_structure; } void setStructure( Structure *s); void loadStructure( Structure *s); void clear(); private: EditorGUI *gui = nullptr; Structure *current_structure = nullptr; };
Fix crash due to uninitialised pointer when displaying a dialog
Fix crash due to uninitialised pointer when displaying a dialog
C
bsd-3-clause
latproc/humid,latproc/humid
c
## Code Before: class EditorGUI; class Structure; class DialogWindow : public nanogui::Window { public: DialogWindow(EditorGUI *screen, nanogui::Theme *theme); nanogui::Window *getWindow() { return this; } Structure *structure() { return current_structure; } void setStructure( Structure *s); void loadStructure( Structure *s); void clear(); private: EditorGUI *gui; Structure *current_structure; }; ## Instruction: Fix crash due to uninitialised pointer when displaying a dialog ## Code After: class EditorGUI; class Structure; class DialogWindow : public nanogui::Window { public: DialogWindow(EditorGUI *screen, nanogui::Theme *theme); nanogui::Window *getWindow() { return this; } Structure *structure() { return current_structure; } void setStructure( Structure *s); void loadStructure( Structure *s); void clear(); private: EditorGUI *gui = nullptr; Structure *current_structure = nullptr; };
... void loadStructure( Structure *s); void clear(); private: EditorGUI *gui = nullptr; Structure *current_structure = nullptr; }; ...
798e1889b6743be7d18a944a46442468004c8ed5
2DXngine.Test/src/Integration_Tiled/TiledFixture.h
2DXngine.Test/src/Integration_Tiled/TiledFixture.h
class TiledFixture : public ::testing::Test { protected: virtual void SetUp() { pugi::xml_document doc; auto parseResult = doc.load_file("./Content/TestMap.tmx"); auto parser = TileMapParser(); this->_parsedMap = parser.parse(doc.child("map")); } virtual void TearDown() { delete _parsedMap; } TiledMap * _parsedMap; };
class TiledFixture : public ::testing::Test { protected: virtual void SetUp() { AssetPath path = AssetPath::create("Content\\TestMap.tmx"); pugi::xml_document doc; auto parseResult = doc.load_file(path.get_fullPath().c_str()); auto parser = TileMapParser(); this->_parsedMap = parser.parse(doc.child("map")); } virtual void TearDown() { delete _parsedMap; } TiledMap * _parsedMap; };
Fix path to map xml
EDIT: Fix path to map xml
C
mit
Harunx9/2DXngine,Harunx9/2DXngine,Harunx9/2DXngine
c
## Code Before: class TiledFixture : public ::testing::Test { protected: virtual void SetUp() { pugi::xml_document doc; auto parseResult = doc.load_file("./Content/TestMap.tmx"); auto parser = TileMapParser(); this->_parsedMap = parser.parse(doc.child("map")); } virtual void TearDown() { delete _parsedMap; } TiledMap * _parsedMap; }; ## Instruction: EDIT: Fix path to map xml ## Code After: class TiledFixture : public ::testing::Test { protected: virtual void SetUp() { AssetPath path = AssetPath::create("Content\\TestMap.tmx"); pugi::xml_document doc; auto parseResult = doc.load_file(path.get_fullPath().c_str()); auto parser = TileMapParser(); this->_parsedMap = parser.parse(doc.child("map")); } virtual void TearDown() { delete _parsedMap; } TiledMap * _parsedMap; };
// ... existing code ... protected: virtual void SetUp() { AssetPath path = AssetPath::create("Content\\TestMap.tmx"); pugi::xml_document doc; auto parseResult = doc.load_file(path.get_fullPath().c_str()); auto parser = TileMapParser(); this->_parsedMap = parser.parse(doc.child("map")); // ... rest of the code ...
44c174807d7362b5d7959f122f2a74ae9ccb7b38
coney/request.py
coney/request.py
from .exceptions import MalformedRequestException class Request(object): def __init__(self, version, metadata, **kwargs): self._version = version self._metadata = metadata self._arguments = kwargs @property def version(self): return self._version @property def arguments(self): return self._arguments @property def metadata(self): return self._metadata @staticmethod def loads(s, serializer): try: l = serializer.loads(s) except(ValueError, TypeError): raise MalformedRequestException(serializer.__name__, s) try: version, metadata, args = l[0:3] except ValueError: raise MalformedRequestException(serializer.__name__, s) else: return Request(version, metadata, args) @staticmethod def dumps(obj, serializer): return serializer.dumps([obj.version, obj.metadata, obj.arguments])
from .exceptions import MalformedRequestException class Request(object): def __init__(self, version, metadata, arguments): self._version = version self._metadata = metadata self._arguments = arguments @property def version(self): return self._version @property def arguments(self): return self._arguments @property def metadata(self): return self._metadata @staticmethod def loads(s, serializer): try: l = serializer.loads(s) except(ValueError, TypeError): raise MalformedRequestException(serializer.__name__, s) try: version, metadata, args = l[0:3] except ValueError: raise MalformedRequestException(serializer.__name__, s) else: return Request(version, metadata, args) @staticmethod def dumps(obj, serializer): return serializer.dumps([obj.version, obj.metadata, obj.arguments])
Fix rpc argument handling when constructing a Request
Fix rpc argument handling when constructing a Request
Python
mit
cbigler/jackrabbit
python
## Code Before: from .exceptions import MalformedRequestException class Request(object): def __init__(self, version, metadata, **kwargs): self._version = version self._metadata = metadata self._arguments = kwargs @property def version(self): return self._version @property def arguments(self): return self._arguments @property def metadata(self): return self._metadata @staticmethod def loads(s, serializer): try: l = serializer.loads(s) except(ValueError, TypeError): raise MalformedRequestException(serializer.__name__, s) try: version, metadata, args = l[0:3] except ValueError: raise MalformedRequestException(serializer.__name__, s) else: return Request(version, metadata, args) @staticmethod def dumps(obj, serializer): return serializer.dumps([obj.version, obj.metadata, obj.arguments]) ## Instruction: Fix rpc argument handling when constructing a Request ## Code After: from .exceptions import MalformedRequestException class Request(object): def __init__(self, version, metadata, arguments): self._version = version self._metadata = metadata self._arguments = arguments @property def version(self): return self._version @property def arguments(self): return self._arguments @property def metadata(self): return self._metadata @staticmethod def loads(s, serializer): try: l = serializer.loads(s) except(ValueError, TypeError): raise MalformedRequestException(serializer.__name__, s) try: version, metadata, args = l[0:3] except ValueError: raise MalformedRequestException(serializer.__name__, s) else: return Request(version, metadata, args) @staticmethod def dumps(obj, serializer): return serializer.dumps([obj.version, obj.metadata, obj.arguments])
// ... existing code ... class Request(object): def __init__(self, version, metadata, arguments): self._version = version self._metadata = metadata self._arguments = arguments @property def version(self): // ... rest of the code ...
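The constructor change in this record matters because Request.loads() unpacks version, metadata, args and passes args as a single positional value, which the old **kwargs signature could not accept. A stripped-down sketch of that difference; the class names are illustrative, not from the coney package.

class KwargsRequest:
    def __init__(self, version, metadata, **kwargs):
        self.arguments = kwargs

class PositionalRequest:
    def __init__(self, version, metadata, arguments):
        self.arguments = arguments

args = {"method": "ping"}
print(PositionalRequest(1, {}, args).arguments)  # {'method': 'ping'}
try:
    KwargsRequest(1, {}, args)  # a dict passed positionally is not keyword arguments
except TypeError as exc:
    print("TypeError:", exc)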
457c2a9d8bcc13c4f8d35bd1077ce8e8d1868142
src/main/java/com/aemreunal/helper/json/JsonArrayBuilder.java
src/main/java/com/aemreunal/helper/json/JsonArrayBuilder.java
package com.aemreunal.helper.json; /* * *********************** * * Copyright (c) 2015 * * * * This code belongs to: * * * * @author Ahmet Emre Ünal * * S001974 * * * * [email protected] * * [email protected] * * * * aemreunal.com * * *********************** * */ import net.minidev.json.JSONArray; import java.util.ArrayList; public class JsonArrayBuilder { private ArrayList<Object> jsonList = new ArrayList<>(); JsonArrayBuilder() { } public JsonArrayBuilder add(Object value) { jsonList.add(value); return this; } public JSONArray build() { JSONArray array = new JSONArray(); array.addAll(jsonList); return array; } }
package com.aemreunal.helper.json; /* * *********************** * * Copyright (c) 2015 * * * * This code belongs to: * * * * @author Ahmet Emre Ünal * * S001974 * * * * [email protected] * * [email protected] * * * * aemreunal.com * * *********************** * */ import net.minidev.json.JSONArray; import java.util.ArrayList; import java.util.Collection; public class JsonArrayBuilder { private ArrayList<Object> jsonList = new ArrayList<>(); JsonArrayBuilder() { } public JsonArrayBuilder add(Object item) { jsonList.add(item); return this; } public JsonArrayBuilder addAll(Collection items) { jsonList.addAll(items); return this; } public JSONArray build() { JSONArray array = new JSONArray(); array.addAll(jsonList); return array; } }
Add collections to JSON arrays
Add collections to JSON arrays
Java
mit
aemreunal/iBeaconServer,aemreunal/iBeaconServer
java
## Code Before: package com.aemreunal.helper.json; /* * *********************** * * Copyright (c) 2015 * * * * This code belongs to: * * * * @author Ahmet Emre Ünal * * S001974 * * * * [email protected] * * [email protected] * * * * aemreunal.com * * *********************** * */ import net.minidev.json.JSONArray; import java.util.ArrayList; public class JsonArrayBuilder { private ArrayList<Object> jsonList = new ArrayList<>(); JsonArrayBuilder() { } public JsonArrayBuilder add(Object value) { jsonList.add(value); return this; } public JSONArray build() { JSONArray array = new JSONArray(); array.addAll(jsonList); return array; } } ## Instruction: Add collections to JSON arrays ## Code After: package com.aemreunal.helper.json; /* * *********************** * * Copyright (c) 2015 * * * * This code belongs to: * * * * @author Ahmet Emre Ünal * * S001974 * * * * [email protected] * * [email protected] * * * * aemreunal.com * * *********************** * */ import net.minidev.json.JSONArray; import java.util.ArrayList; import java.util.Collection; public class JsonArrayBuilder { private ArrayList<Object> jsonList = new ArrayList<>(); JsonArrayBuilder() { } public JsonArrayBuilder add(Object item) { jsonList.add(item); return this; } public JsonArrayBuilder addAll(Collection items) { jsonList.addAll(items); return this; } public JSONArray build() { JSONArray array = new JSONArray(); array.addAll(jsonList); return array; } }
# ... existing code ... import net.minidev.json.JSONArray; import java.util.ArrayList; import java.util.Collection; public class JsonArrayBuilder { private ArrayList<Object> jsonList = new ArrayList<>(); # ... modified code ... JsonArrayBuilder() { } public JsonArrayBuilder add(Object item) { jsonList.add(item); return this; } public JsonArrayBuilder addAll(Collection items) { jsonList.addAll(items); return this; } # ... rest of the code ...
6dc47f932b5c7f84918ec730b3ccd03d74070453
app/py/cuda_sort/app_specific.py
app/py/cuda_sort/app_specific.py
import os from cudatext import * def get_ini_fn(): return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini') def ed_set_text_all(lines): ed.set_text_all('\n'.join(lines)+'\n') def ed_get_text_all(): n = ed.get_line_count() if ed.get_text_line(n-1)=='': n-=1 return [ed.get_text_line(i) for i in range(n)] def ed_insert_to_lines(lines, line1, line2): ed.delete(0, line1, 0, line2+1) ed.insert(0, line1, '\n'.join(lines)+'\n') ed.set_caret(0, line2+1, 0, line1) def ed_set_tab_title(s): ed.set_prop(PROP_TAB_TITLE, s) def ed_convert_tabs_to_spaces(s): return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s) def msg_show_error(s): msg_box(s, MB_OK+MB_ICONERROR) def ed_get_sel_lines(): return ed.get_sel_lines()
import os from cudatext import * def get_ini_fn(): return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini') def ed_set_text_all(lines): ed.set_text_all('\n'.join(lines)+'\n') def ed_get_text_all(): n = ed.get_line_count() if ed.get_text_line(n-1)=='': n-=1 return [ed.get_text_line(i) for i in range(n)] def ed_insert_to_lines(lines, line1, line2): ed.delete(0, line1, 0, line2+1) ed.insert(0, line1, '\n'.join(lines)+'\n') ed.set_caret(0, line1+len(lines), 0, line1) def ed_set_tab_title(s): ed.set_prop(PROP_TAB_TITLE, s) def ed_convert_tabs_to_spaces(s): return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s) def msg_show_error(s): msg_box(s, MB_OK+MB_ICONERROR) def ed_get_sel_lines(): return ed.get_sel_lines()
Sort plg: fix caret pos after 'delete empty lines'
Sort plg: fix caret pos after 'delete empty lines'
Python
mpl-2.0
Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText
python
## Code Before: import os from cudatext import * def get_ini_fn(): return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini') def ed_set_text_all(lines): ed.set_text_all('\n'.join(lines)+'\n') def ed_get_text_all(): n = ed.get_line_count() if ed.get_text_line(n-1)=='': n-=1 return [ed.get_text_line(i) for i in range(n)] def ed_insert_to_lines(lines, line1, line2): ed.delete(0, line1, 0, line2+1) ed.insert(0, line1, '\n'.join(lines)+'\n') ed.set_caret(0, line2+1, 0, line1) def ed_set_tab_title(s): ed.set_prop(PROP_TAB_TITLE, s) def ed_convert_tabs_to_spaces(s): return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s) def msg_show_error(s): msg_box(s, MB_OK+MB_ICONERROR) def ed_get_sel_lines(): return ed.get_sel_lines() ## Instruction: Sort plg: fix caret pos after 'delete empty lines' ## Code After: import os from cudatext import * def get_ini_fn(): return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini') def ed_set_text_all(lines): ed.set_text_all('\n'.join(lines)+'\n') def ed_get_text_all(): n = ed.get_line_count() if ed.get_text_line(n-1)=='': n-=1 return [ed.get_text_line(i) for i in range(n)] def ed_insert_to_lines(lines, line1, line2): ed.delete(0, line1, 0, line2+1) ed.insert(0, line1, '\n'.join(lines)+'\n') ed.set_caret(0, line1+len(lines), 0, line1) def ed_set_tab_title(s): ed.set_prop(PROP_TAB_TITLE, s) def ed_convert_tabs_to_spaces(s): return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s) def msg_show_error(s): msg_box(s, MB_OK+MB_ICONERROR) def ed_get_sel_lines(): return ed.get_sel_lines()
... def ed_insert_to_lines(lines, line1, line2): ed.delete(0, line1, 0, line2+1) ed.insert(0, line1, '\n'.join(lines)+'\n') ed.set_caret(0, line1+len(lines), 0, line1) def ed_set_tab_title(s): ed.set_prop(PROP_TAB_TITLE, s) ...
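The arithmetic behind the caret fix in this record can be checked without the CudaText API. When the replacement block is shorter than the original selection (as after deleting empty lines), line2 + 1 points past the newly inserted text, while line1 + len(lines) lands on the first line after it. The numbers below are made up purely for illustration.

line1, line2 = 2, 6                  # five original lines were selected
lines = ["alpha", "beta", "gamma"]   # empty lines removed, three lines remain
old_caret_end = line2 + 1            # 7 -> beyond the inserted block
new_caret_end = line1 + len(lines)   # 5 -> first line after the inserted block
print(old_caret_end, new_caret_end)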
81d5364248de03a08b75c78863769dc7698ce285
src/test/java/org/embulk/input/TestRemoteFileInputPlugin.java
src/test/java/org/embulk/input/TestRemoteFileInputPlugin.java
package org.embulk.input; import com.google.common.base.Optional; import org.embulk.EmbulkTestRuntime; import org.embulk.config.ConfigSource; import org.embulk.spi.Exec; import org.junit.Rule; import org.junit.Test; import java.util.Collections; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; public class TestRemoteFileInputPlugin { @Rule public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); @Test public void checkDefaultValues() { ConfigSource config = Exec.newConfigSource(); RemoteFileInputPlugin.PluginTask task = config.loadConfig(RemoteFileInputPlugin.PluginTask.class); assertThat(task.getHosts(), is(Collections.<String>emptyList())); assertThat(task.getHostsCommand(), is(Optional.<String>absent())); assertThat(task.getHostsSeparator(), is(" ")); assertThat(task.getPath(), is("")); assertThat(task.getPathCommand(), is(Optional.<String>absent())); assertThat(task.getAuth(), is(Collections.<String, String>emptyMap())); assertThat(task.getLastTarget(), is(Optional.<RemoteFileInputPlugin.Target>absent())); } }
package org.embulk.input; //import com.google.common.base.Optional; //import org.embulk.EmbulkTestRuntime; //import org.embulk.config.ConfigSource; //import org.embulk.spi.Exec; //import org.junit.Rule; //import org.junit.Test; // //import java.util.Collections; // //import static org.hamcrest.CoreMatchers.is; //import static org.junit.Assert.assertThat; public class TestRemoteFileInputPlugin { // @Rule // public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); // // @Test // public void checkDefaultValues() // { // ConfigSource config = Exec.newConfigSource(); // // RemoteFileInputPlugin.PluginTask task = config.loadConfig(RemoteFileInputPlugin.PluginTask.class); // assertThat(task.getHosts(), is(Collections.<String>emptyList())); // assertThat(task.getHostsCommand(), is(Optional.<String>absent())); // assertThat(task.getHostsSeparator(), is(" ")); // assertThat(task.getPath(), is("")); // assertThat(task.getPathCommand(), is(Optional.<String>absent())); // assertThat(task.getAuth(), is(Collections.<String, String>emptyMap())); // assertThat(task.getLastTarget(), is(Optional.<RemoteFileInputPlugin.Target>absent())); // } }
Comment out test. (Can't compile)
Comment out test. (Can't compile)
Java
mit
kamatama41/embulk-input-remote,kamatama41/embulk-input-remote,kamatama41/embulk-input-remote
java
## Code Before: package org.embulk.input; import com.google.common.base.Optional; import org.embulk.EmbulkTestRuntime; import org.embulk.config.ConfigSource; import org.embulk.spi.Exec; import org.junit.Rule; import org.junit.Test; import java.util.Collections; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; public class TestRemoteFileInputPlugin { @Rule public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); @Test public void checkDefaultValues() { ConfigSource config = Exec.newConfigSource(); RemoteFileInputPlugin.PluginTask task = config.loadConfig(RemoteFileInputPlugin.PluginTask.class); assertThat(task.getHosts(), is(Collections.<String>emptyList())); assertThat(task.getHostsCommand(), is(Optional.<String>absent())); assertThat(task.getHostsSeparator(), is(" ")); assertThat(task.getPath(), is("")); assertThat(task.getPathCommand(), is(Optional.<String>absent())); assertThat(task.getAuth(), is(Collections.<String, String>emptyMap())); assertThat(task.getLastTarget(), is(Optional.<RemoteFileInputPlugin.Target>absent())); } } ## Instruction: Comment out test. (Can't compile) ## Code After: package org.embulk.input; //import com.google.common.base.Optional; //import org.embulk.EmbulkTestRuntime; //import org.embulk.config.ConfigSource; //import org.embulk.spi.Exec; //import org.junit.Rule; //import org.junit.Test; // //import java.util.Collections; // //import static org.hamcrest.CoreMatchers.is; //import static org.junit.Assert.assertThat; public class TestRemoteFileInputPlugin { // @Rule // public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); // // @Test // public void checkDefaultValues() // { // ConfigSource config = Exec.newConfigSource(); // // RemoteFileInputPlugin.PluginTask task = config.loadConfig(RemoteFileInputPlugin.PluginTask.class); // assertThat(task.getHosts(), is(Collections.<String>emptyList())); // assertThat(task.getHostsCommand(), is(Optional.<String>absent())); // assertThat(task.getHostsSeparator(), is(" ")); // assertThat(task.getPath(), is("")); // assertThat(task.getPathCommand(), is(Optional.<String>absent())); // assertThat(task.getAuth(), is(Collections.<String, String>emptyMap())); // assertThat(task.getLastTarget(), is(Optional.<RemoteFileInputPlugin.Target>absent())); // } }
# ... existing code ... package org.embulk.input; //import com.google.common.base.Optional; //import org.embulk.EmbulkTestRuntime; //import org.embulk.config.ConfigSource; //import org.embulk.spi.Exec; //import org.junit.Rule; //import org.junit.Test; // //import java.util.Collections; // //import static org.hamcrest.CoreMatchers.is; //import static org.junit.Assert.assertThat; public class TestRemoteFileInputPlugin { // @Rule // public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); // // @Test // public void checkDefaultValues() // { // ConfigSource config = Exec.newConfigSource(); // // RemoteFileInputPlugin.PluginTask task = config.loadConfig(RemoteFileInputPlugin.PluginTask.class); // assertThat(task.getHosts(), is(Collections.<String>emptyList())); // assertThat(task.getHostsCommand(), is(Optional.<String>absent())); // assertThat(task.getHostsSeparator(), is(" ")); // assertThat(task.getPath(), is("")); // assertThat(task.getPathCommand(), is(Optional.<String>absent())); // assertThat(task.getAuth(), is(Collections.<String, String>emptyMap())); // assertThat(task.getLastTarget(), is(Optional.<RemoteFileInputPlugin.Target>absent())); // } } # ... rest of the code ...
e87490ea157f4882f644329e4b447f51c0a2acb3
benchmarks/bench_vectorize.py
benchmarks/bench_vectorize.py
import numpy as np from numba import vectorize @vectorize(["float32(float32, float32)", "float64(float64, float64)", "complex64(complex64, complex64)", "complex128(complex128, complex128)"]) def mul(x, y): return x * y class TimeSuite: n = 10000 dtypes = ('float32', 'float64', 'complex64', 'complex128') def setup(self): self.samples = {} self.out = {} for dtype in self.dtypes: self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype) self.out[dtype] = np.zeros(self.n, dtype=dtype) def _binary_func(func, dtype): def f(self): func(self.samples[dtype], self.samples[dtype], self.out[dtype]) return f for dtype in dtypes: locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype) del _binary_func
import numpy as np from numba import vectorize @vectorize(["float32(float32, float32)", "float64(float64, float64)", "complex64(complex64, complex64)", "complex128(complex128, complex128)"]) def mul(x, y): return x * y @vectorize(["float32(float32, float32)", "float64(float64, float64)"]) def rel_diff(x, y): # XXX for float32 performance, we should write `np.float32(2)`, but # that's not the natural way to write this code... return 2 * (x - y) / (x + y) class TimeSuite: n = 10000 dtypes = ('float32', 'float64', 'complex64', 'complex128') def setup(self): self.samples = {} self.out = {} for dtype in self.dtypes: self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype) self.out[dtype] = np.zeros(self.n, dtype=dtype) def _binary_func(func, dtype): def f(self): func(self.samples[dtype], self.samples[dtype], self.out[dtype]) return f for dtype in dtypes: locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype) time_rel_diff_float32 = _binary_func(rel_diff, 'float32') time_rel_diff_float64 = _binary_func(rel_diff, 'float64') del _binary_func
Add a relative difference vectorization benchmark
Add a relative difference vectorization benchmark
Python
bsd-2-clause
gmarkall/numba-benchmark,numba/numba-benchmark
python
## Code Before: import numpy as np from numba import vectorize @vectorize(["float32(float32, float32)", "float64(float64, float64)", "complex64(complex64, complex64)", "complex128(complex128, complex128)"]) def mul(x, y): return x * y class TimeSuite: n = 10000 dtypes = ('float32', 'float64', 'complex64', 'complex128') def setup(self): self.samples = {} self.out = {} for dtype in self.dtypes: self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype) self.out[dtype] = np.zeros(self.n, dtype=dtype) def _binary_func(func, dtype): def f(self): func(self.samples[dtype], self.samples[dtype], self.out[dtype]) return f for dtype in dtypes: locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype) del _binary_func ## Instruction: Add a relative difference vectorization benchmark ## Code After: import numpy as np from numba import vectorize @vectorize(["float32(float32, float32)", "float64(float64, float64)", "complex64(complex64, complex64)", "complex128(complex128, complex128)"]) def mul(x, y): return x * y @vectorize(["float32(float32, float32)", "float64(float64, float64)"]) def rel_diff(x, y): # XXX for float32 performance, we should write `np.float32(2)`, but # that's not the natural way to write this code... return 2 * (x - y) / (x + y) class TimeSuite: n = 10000 dtypes = ('float32', 'float64', 'complex64', 'complex128') def setup(self): self.samples = {} self.out = {} for dtype in self.dtypes: self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype) self.out[dtype] = np.zeros(self.n, dtype=dtype) def _binary_func(func, dtype): def f(self): func(self.samples[dtype], self.samples[dtype], self.out[dtype]) return f for dtype in dtypes: locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype) time_rel_diff_float32 = _binary_func(rel_diff, 'float32') time_rel_diff_float64 = _binary_func(rel_diff, 'float64') del _binary_func
# ... existing code ... return x * y @vectorize(["float32(float32, float32)", "float64(float64, float64)"]) def rel_diff(x, y): # XXX for float32 performance, we should write `np.float32(2)`, but # that's not the natural way to write this code... return 2 * (x - y) / (x + y) class TimeSuite: n = 10000 # ... modified code ... self.samples = {} self.out = {} for dtype in self.dtypes: self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype) self.out[dtype] = np.zeros(self.n, dtype=dtype) def _binary_func(func, dtype): ... for dtype in dtypes: locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype) time_rel_diff_float32 = _binary_func(rel_diff, 'float32') time_rel_diff_float64 = _binary_func(rel_diff, 'float64') del _binary_func # ... rest of the code ...
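A standalone way to exercise the new rel_diff ufunc outside the ASV suite, mirroring how the benchmark calls it with a preallocated output array. This assumes numba and numpy are installed; the shift of the sample range to linspace(0.1, ...) in setup() presumably keeps the denominator x + y away from zero, and the data below follows the same idea.

import numpy as np
from numba import vectorize

@vectorize(["float32(float32, float32)", "float64(float64, float64)"])
def rel_diff(x, y):
    return 2 * (x - y) / (x + y)

a = np.linspace(0.1, 1, 8, dtype=np.float64)
b = a[::-1].copy()
out = np.zeros_like(a)
rel_diff(a, b, out)  # @vectorize builds a real NumPy ufunc, so an explicit out array works
print(out)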
e9784ddaeea80311587ed7b255869d791008b29e
modder/gui/trayicon.py
modder/gui/trayicon.py
import os.path import platform import wx def create_menu_item(menu, label, func=None): menu_item = wx.MenuItem(menu, -1, label) if callable(func): menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId()) else: menu_item.Enable(False) menu.AppendItem(menu_item) return menu_item class TrayIcon(wx.TaskBarIcon): icon_fpath = os.path.join( os.path.dirname(__file__), 'resources', 'icons8-Module-64.png' ) def __init__(self, frame=None): super(TrayIcon, self).__init__() self._frame = frame or wx.Frame(None) self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG)) def CreatePopupMenu(self): menu = wx.Menu() create_menu_item(menu, 'Modder') menu.AppendSeparator() mods_count = wx.GetApp()._manager.count create_menu_item(menu, '{:d} mods loaded'.format(mods_count)) create_menu_item(menu, 'Exit', self.on_exit) return menu def on_exit(self, evt): wx.CallAfter(self.Destroy) self._frame.Close()
import os.path import platform import wx def create_menu_item(menu, label, func=None): menu_item = wx.MenuItem(menu, -1, label) if callable(func): menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId()) else: menu_item.Enable(False) menu.AppendItem(menu_item) return menu_item class TrayIcon(wx.TaskBarIcon): icon_fpath = os.path.join( os.path.dirname(__file__), 'resources', 'icons8-Module-64.png' ) def __init__(self, frame=None): super(TrayIcon, self).__init__() self._frame = frame or wx.Frame(None) self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG)) def CreatePopupMenu(self): menu = wx.Menu() create_menu_item(menu, 'Modder') menu.AppendSeparator() mods_count = wx.GetApp()._manager.count create_menu_item(menu, '{:d} mods loaded'.format(mods_count), self.on_manage_mods) create_menu_item(menu, 'Exit', self.on_exit) return menu def on_manage_mods(self, evt): pass def on_exit(self, evt): wx.CallAfter(self.Destroy) self._frame.Close()
Add stub for mod manager GUI
Add stub for mod manager GUI
Python
mit
JokerQyou/Modder2
python
## Code Before: import os.path import platform import wx def create_menu_item(menu, label, func=None): menu_item = wx.MenuItem(menu, -1, label) if callable(func): menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId()) else: menu_item.Enable(False) menu.AppendItem(menu_item) return menu_item class TrayIcon(wx.TaskBarIcon): icon_fpath = os.path.join( os.path.dirname(__file__), 'resources', 'icons8-Module-64.png' ) def __init__(self, frame=None): super(TrayIcon, self).__init__() self._frame = frame or wx.Frame(None) self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG)) def CreatePopupMenu(self): menu = wx.Menu() create_menu_item(menu, 'Modder') menu.AppendSeparator() mods_count = wx.GetApp()._manager.count create_menu_item(menu, '{:d} mods loaded'.format(mods_count)) create_menu_item(menu, 'Exit', self.on_exit) return menu def on_exit(self, evt): wx.CallAfter(self.Destroy) self._frame.Close() ## Instruction: Add stub for mod manager GUI ## Code After: import os.path import platform import wx def create_menu_item(menu, label, func=None): menu_item = wx.MenuItem(menu, -1, label) if callable(func): menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId()) else: menu_item.Enable(False) menu.AppendItem(menu_item) return menu_item class TrayIcon(wx.TaskBarIcon): icon_fpath = os.path.join( os.path.dirname(__file__), 'resources', 'icons8-Module-64.png' ) def __init__(self, frame=None): super(TrayIcon, self).__init__() self._frame = frame or wx.Frame(None) self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG)) def CreatePopupMenu(self): menu = wx.Menu() create_menu_item(menu, 'Modder') menu.AppendSeparator() mods_count = wx.GetApp()._manager.count create_menu_item(menu, '{:d} mods loaded'.format(mods_count), self.on_manage_mods) create_menu_item(menu, 'Exit', self.on_exit) return menu def on_manage_mods(self, evt): pass def on_exit(self, evt): wx.CallAfter(self.Destroy) self._frame.Close()
// ... existing code ... menu.AppendSeparator() mods_count = wx.GetApp()._manager.count create_menu_item(menu, '{:d} mods loaded'.format(mods_count), self.on_manage_mods) create_menu_item(menu, 'Exit', self.on_exit) return menu def on_manage_mods(self, evt): pass def on_exit(self, evt): wx.CallAfter(self.Destroy) // ... rest of the code ...
aa9a77c33014faaf53ab390d971334ade1d3b7af
buildSrc/subprojects/profiling/profiling.gradle.kts
buildSrc/subprojects/profiling/profiling.gradle.kts
import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins plugins { `java-gradle-plugin` } apply(plugin = "org.gradle.kotlin.kotlin-dsl") apply<PrecompiledScriptPlugins>() dependencies { implementation("me.champeau.gradle:jmh-gradle-plugin:0.4.7") implementation("org.jsoup:jsoup:1.11.2") implementation("com.gradle:build-scan-plugin:1.16-rc-1") implementation(project(":configuration")) implementation(project(":kotlinDsl")) } gradlePlugin { plugins { register("buildscan") { id = "gradlebuild.buildscan" implementationClass = "org.gradle.gradlebuild.profiling.buildscan.BuildScanPlugin" } } }
import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins plugins { `java-gradle-plugin` } apply(plugin = "org.gradle.kotlin.kotlin-dsl") apply<PrecompiledScriptPlugins>() dependencies { implementation("me.champeau.gradle:jmh-gradle-plugin:0.4.7") implementation("org.jsoup:jsoup:1.11.2") implementation("com.gradle:build-scan-plugin:1.16-rc-1-20180817192640-enterprise_release") implementation(project(":configuration")) implementation(project(":kotlinDsl")) } gradlePlugin { plugins { register("buildscan") { id = "gradlebuild.buildscan" implementationClass = "org.gradle.gradlebuild.profiling.buildscan.BuildScanPlugin" } } }
Use the 'proper' version for the 1.16-rc-1 plugin
Use the 'proper' version for the 1.16-rc-1 plugin -- Enterprise releases are 10% more releasey
Kotlin
apache-2.0
lsmaira/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle
kotlin
## Code Before: import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins plugins { `java-gradle-plugin` } apply(plugin = "org.gradle.kotlin.kotlin-dsl") apply<PrecompiledScriptPlugins>() dependencies { implementation("me.champeau.gradle:jmh-gradle-plugin:0.4.7") implementation("org.jsoup:jsoup:1.11.2") implementation("com.gradle:build-scan-plugin:1.16-rc-1") implementation(project(":configuration")) implementation(project(":kotlinDsl")) } gradlePlugin { plugins { register("buildscan") { id = "gradlebuild.buildscan" implementationClass = "org.gradle.gradlebuild.profiling.buildscan.BuildScanPlugin" } } } ## Instruction: Use the 'proper' version for the 1.16-rc-1 plugin -- Enterprise releases are 10% more releasey ## Code After: import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins plugins { `java-gradle-plugin` } apply(plugin = "org.gradle.kotlin.kotlin-dsl") apply<PrecompiledScriptPlugins>() dependencies { implementation("me.champeau.gradle:jmh-gradle-plugin:0.4.7") implementation("org.jsoup:jsoup:1.11.2") implementation("com.gradle:build-scan-plugin:1.16-rc-1-20180817192640-enterprise_release") implementation(project(":configuration")) implementation(project(":kotlinDsl")) } gradlePlugin { plugins { register("buildscan") { id = "gradlebuild.buildscan" implementationClass = "org.gradle.gradlebuild.profiling.buildscan.BuildScanPlugin" } } }
... dependencies { implementation("me.champeau.gradle:jmh-gradle-plugin:0.4.7") implementation("org.jsoup:jsoup:1.11.2") implementation("com.gradle:build-scan-plugin:1.16-rc-1-20180817192640-enterprise_release") implementation(project(":configuration")) implementation(project(":kotlinDsl")) } ...
92caf20511f42cae8dd45fa982d538c8b96161c5
include/base/types.h
include/base/types.h
/* -------------------------------------------------------------------------- * Name: types.h * Purpose: Various typedefs and utility macros * ----------------------------------------------------------------------- */ #ifndef TYPES_H #define TYPES_H typedef signed char int8_t; typedef signed short int16_t; typedef signed int int32_t; typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #ifdef _WIN32 typedef int intptr_t; #endif #define NELEMS(x) ((int) (sizeof(x) / sizeof(x[0]))) #define MIN(x,y) ((x) < (y) ? (x) : (y)) #define MAX(x,y) ((x) > (y) ? (x) : (y)) #define NOT_USED(x) ((x) = (x)) #ifdef _WIN32 #define INLINE __inline #else #define INLINE __inline__ #endif #ifdef __GNUC__ #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else #define likely(x) (x) #define unlikely(x) (x) #endif #endif /* TYPES_H */
/* -------------------------------------------------------------------------- * Name: types.h * Purpose: Various typedefs and utility macros * ----------------------------------------------------------------------- */ #ifndef TYPES_H #define TYPES_H typedef signed char int8_t; typedef signed short int16_t; typedef signed int int32_t; typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #ifdef _MSC_VER #ifdef _WIN64 typedef __int64 intptr_t; #else typedef int intptr_t; #endif #endif #define NELEMS(x) ((int) (sizeof(x) / sizeof(x[0]))) #define MIN(x,y) ((x) < (y) ? (x) : (y)) #define MAX(x,y) ((x) > (y) ? (x) : (y)) #define NOT_USED(x) ((x) = (x)) #ifdef _WIN32 #define INLINE __inline #else #define INLINE __inline__ #endif #ifdef __GNUC__ #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else #define likely(x) (x) #define unlikely(x) (x) #endif #endif /* TYPES_H */
Declare intptr_t correctly for 64-bit Windows builds.
Declare intptr_t correctly for 64-bit Windows builds.
C
bsd-2-clause
dpt/Containers,dpt/Containers
c
## Code Before: /* -------------------------------------------------------------------------- * Name: types.h * Purpose: Various typedefs and utility macros * ----------------------------------------------------------------------- */ #ifndef TYPES_H #define TYPES_H typedef signed char int8_t; typedef signed short int16_t; typedef signed int int32_t; typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #ifdef _WIN32 typedef int intptr_t; #endif #define NELEMS(x) ((int) (sizeof(x) / sizeof(x[0]))) #define MIN(x,y) ((x) < (y) ? (x) : (y)) #define MAX(x,y) ((x) > (y) ? (x) : (y)) #define NOT_USED(x) ((x) = (x)) #ifdef _WIN32 #define INLINE __inline #else #define INLINE __inline__ #endif #ifdef __GNUC__ #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else #define likely(x) (x) #define unlikely(x) (x) #endif #endif /* TYPES_H */ ## Instruction: Declare intptr_t correctly for 64-bit Windows builds. ## Code After: /* -------------------------------------------------------------------------- * Name: types.h * Purpose: Various typedefs and utility macros * ----------------------------------------------------------------------- */ #ifndef TYPES_H #define TYPES_H typedef signed char int8_t; typedef signed short int16_t; typedef signed int int32_t; typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; #ifdef _MSC_VER #ifdef _WIN64 typedef __int64 intptr_t; #else typedef int intptr_t; #endif #endif #define NELEMS(x) ((int) (sizeof(x) / sizeof(x[0]))) #define MIN(x,y) ((x) < (y) ? (x) : (y)) #define MAX(x,y) ((x) > (y) ? (x) : (y)) #define NOT_USED(x) ((x) = (x)) #ifdef _WIN32 #define INLINE __inline #else #define INLINE __inline__ #endif #ifdef __GNUC__ #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else #define likely(x) (x) #define unlikely(x) (x) #endif #endif /* TYPES_H */
# ... existing code ... typedef unsigned short uint16_t; typedef unsigned int uint32_t; #ifdef _MSC_VER #ifdef _WIN64 typedef __int64 intptr_t; #else typedef int intptr_t; #endif #endif #define NELEMS(x) ((int) (sizeof(x) / sizeof(x[0]))) # ... rest of the code ...
83e48445400c8556a7ef8f9064965b9d77e3d877
tools/build_interface_docs.py
tools/build_interface_docs.py
# stdlib imports import os import sys #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) # local imports from interfacedocgen import InterfaceHelpWriter package = 'nipype' outdir = os.path.join('interfaces','generated') docwriter = InterfaceHelpWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.pipeline', '.\testing', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.interfaces\.base$', '\.interfaces\.matlab$', '\.interfaces\.rest$', '\.interfaces\.pymvpa$', '\.interfaces\.traits', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', '.\testing', ] docwriter.class_skip_patterns += ['FSL', 'FS', 'Spm', 'Tester', 'Spec$', 'afni', 'Numpy' ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='interfaces') print '%d files written' % len(docwriter.written_modules)
# stdlib imports import os import sys #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) # local imports from interfacedocgen import InterfaceHelpWriter package = 'nipype' outdir = os.path.join('interfaces','generated') docwriter = InterfaceHelpWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.pipeline', '.\testing', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.interfaces\.base$', '\.interfaces\.matlab$', '\.interfaces\.rest$', '\.interfaces\.pymvpa$', '\.interfaces\.traits', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', '.\testing', ] docwriter.class_skip_patterns += ['FSL', 'FS', 'Spm', 'Tester', 'Spec$', 'afni', 'Numpy' # NipypeTester raises an # exception when instantiated in # InterfaceHelpWriter.generate_api_doc 'NipypeTester', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='interfaces') print '%d files written' % len(docwriter.written_modules)
Remove NipypeTester from doc generation.
Remove NipypeTester from doc generation. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00
Python
bsd-3-clause
FCP-INDI/nipype,pearsonlab/nipype,blakedewey/nipype,dmordom/nipype,dgellis90/nipype,fprados/nipype,Leoniela/nipype,mick-d/nipype_source,wanderine/nipype,mick-d/nipype,sgiavasis/nipype,rameshvs/nipype,gerddie/nipype,grlee77/nipype,arokem/nipype,christianbrodbeck/nipype,carolFrohlich/nipype,rameshvs/nipype,carlohamalainen/nipype,satra/NiPypeold,FCP-INDI/nipype,sgiavasis/nipype,carolFrohlich/nipype,FredLoney/nipype,glatard/nipype,pearsonlab/nipype,carlohamalainen/nipype,glatard/nipype,dgellis90/nipype,JohnGriffiths/nipype,fprados/nipype,mick-d/nipype_source,christianbrodbeck/nipype,wanderine/nipype,iglpdc/nipype,Leoniela/nipype,dgellis90/nipype,arokem/nipype,grlee77/nipype,wanderine/nipype,mick-d/nipype,gerddie/nipype,wanderine/nipype,FCP-INDI/nipype,sgiavasis/nipype,blakedewey/nipype,carolFrohlich/nipype,FCP-INDI/nipype,dgellis90/nipype,blakedewey/nipype,sgiavasis/nipype,blakedewey/nipype,carolFrohlich/nipype,arokem/nipype,JohnGriffiths/nipype,carlohamalainen/nipype,pearsonlab/nipype,mick-d/nipype,arokem/nipype,rameshvs/nipype,iglpdc/nipype,gerddie/nipype,dmordom/nipype,JohnGriffiths/nipype,grlee77/nipype,fprados/nipype,iglpdc/nipype,satra/NiPypeold,iglpdc/nipype,gerddie/nipype,rameshvs/nipype,glatard/nipype,pearsonlab/nipype,mick-d/nipype_source,mick-d/nipype,FredLoney/nipype,Leoniela/nipype,grlee77/nipype,dmordom/nipype,JohnGriffiths/nipype,glatard/nipype,FredLoney/nipype
python
## Code Before: # stdlib imports import os import sys #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) # local imports from interfacedocgen import InterfaceHelpWriter package = 'nipype' outdir = os.path.join('interfaces','generated') docwriter = InterfaceHelpWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.pipeline', '.\testing', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.interfaces\.base$', '\.interfaces\.matlab$', '\.interfaces\.rest$', '\.interfaces\.pymvpa$', '\.interfaces\.traits', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', '.\testing', ] docwriter.class_skip_patterns += ['FSL', 'FS', 'Spm', 'Tester', 'Spec$', 'afni', 'Numpy' ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='interfaces') print '%d files written' % len(docwriter.written_modules) ## Instruction: Remove NipypeTester from doc generation. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1373 ead46cd0-7350-4e37-8683-fc4c6f79bf00 ## Code After: # stdlib imports import os import sys #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) # local imports from interfacedocgen import InterfaceHelpWriter package = 'nipype' outdir = os.path.join('interfaces','generated') docwriter = InterfaceHelpWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.pipeline', '.\testing', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.interfaces\.base$', '\.interfaces\.matlab$', '\.interfaces\.rest$', '\.interfaces\.pymvpa$', '\.interfaces\.traits', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', '.\testing', ] docwriter.class_skip_patterns += ['FSL', 'FS', 'Spm', 'Tester', 'Spec$', 'afni', 'Numpy' # NipypeTester raises an # exception when instantiated in # InterfaceHelpWriter.generate_api_doc 'NipypeTester', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='interfaces') print '%d files written' % len(docwriter.written_modules)
... 'Spec$', 'afni', 'Numpy' # NipypeTester raises an # exception when instantiated in # InterfaceHelpWriter.generate_api_doc 'NipypeTester', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='interfaces') ...
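One pitfall worth keeping in mind when growing literal lists like class_skip_patterns above: adjacent string literals concatenate, so an entry added without a comma after its predecessor silently merges with it instead of raising an error. A small illustration, with names reused from the list purely as sample strings.

patterns = ['FSL',
            'Numpy'        # a comment between the literals does not separate them
            'NipypeTester',
            ]
print(len(patterns), patterns)  # 2 ['FSL', 'NumpyNipypeTester']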
ae916c1ee52941bb5a1ccf87abe2a9758897bd08
IPython/utils/ulinecache.py
IPython/utils/ulinecache.py
import functools import linecache import sys from IPython.utils import py3compat from IPython.utils import openpy getline = linecache.getline # getlines has to be looked up at runtime, because doctests monkeypatch it. @functools.wraps(linecache.getlines) def getlines(filename, module_globals=None): return linecache.getlines(filename, module_globals=module_globals)
import functools import linecache import sys from warnings import warn from IPython.utils import py3compat from IPython.utils import openpy getline = linecache.getline # getlines has to be looked up at runtime, because doctests monkeypatch it. @functools.wraps(linecache.getlines) def getlines(filename, module_globals=None): """ Deprecated since IPython 6.0 """ warn(("`IPython.utils.ulinecache.getlines` is deprecated since" " IPython 6.0 and will be removed in future versions."), DeprecationWarning, stacklevel=2) return linecache.getlines(filename, module_globals=module_globals)
Add deprecation warnings and message to getlines function
Add deprecation warnings and message to getlines function
Python
bsd-3-clause
ipython/ipython,ipython/ipython
python
## Code Before: import functools import linecache import sys from IPython.utils import py3compat from IPython.utils import openpy getline = linecache.getline # getlines has to be looked up at runtime, because doctests monkeypatch it. @functools.wraps(linecache.getlines) def getlines(filename, module_globals=None): return linecache.getlines(filename, module_globals=module_globals) ## Instruction: Add deprecation warnings and message to getlines function ## Code After: import functools import linecache import sys from warnings import warn from IPython.utils import py3compat from IPython.utils import openpy getline = linecache.getline # getlines has to be looked up at runtime, because doctests monkeypatch it. @functools.wraps(linecache.getlines) def getlines(filename, module_globals=None): """ Deprecated since IPython 6.0 """ warn(("`IPython.utils.ulinecache.getlines` is deprecated since" " IPython 6.0 and will be removed in future versions."), DeprecationWarning, stacklevel=2) return linecache.getlines(filename, module_globals=module_globals)
... import functools import linecache import sys from warnings import warn from IPython.utils import py3compat from IPython.utils import openpy ... # getlines has to be looked up at runtime, because doctests monkeypatch it. @functools.wraps(linecache.getlines) def getlines(filename, module_globals=None): """ Deprecated since IPython 6.0 """ warn(("`IPython.utils.ulinecache.getlines` is deprecated since" " IPython 6.0 and will be removed in future versions."), DeprecationWarning, stacklevel=2) return linecache.getlines(filename, module_globals=module_globals) ...
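Because DeprecationWarning is often hidden by default, a quick way to confirm that a wrapper like the one above actually emits it is to record warnings explicitly. The function below is a stand-in with the same warn() call shape, not the real IPython getlines.

import warnings

def getlines_like(filename, module_globals=None):
    # Same pattern as the deprecated wrapper: warn once, then delegate (here, return a stub).
    warnings.warn("`getlines` is deprecated", DeprecationWarning, stacklevel=2)
    return []

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    getlines_like("example.py")

print(caught[0].category.__name__, "-", caught[0].message)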
7698ad7a907da5e7b4ad7cfd40255bb9c44b5b87
scripts/sort-cluster-issue.py
scripts/sort-cluster-issue.py
from pyspark import SparkContext from pyspark.sql import SQLContext, Row from pyspark.sql.functions import asc, desc if __name__ == "__main__": sc = SparkContext(appName='resort data') sqlContext = SQLContext(sc) df = sqlContext.read.load('hdfs://discovery3:9000/tmp/dasmith/c19-20160919-a50-o08/pretty.parquet') #df = sqlContext.read.load('hdfs://discovery3:9000/tmp/dasmith/c19-20160402-a50-o08/out.parquet') df.registerTempTable("newspaper") df2 = sqlContext.sql("select series, date, count(*) as cnt from newspaper group by series, date order by cnt desc") df3 = df.join(df2, ['series', 'date']) df3.sort(desc("cnt"), asc("begin"), asc("end"))\ .write.json('/gss_gpfs_scratch/xu.shao/network/resorted-pretty.json')
from __future__ import print_function import sys from pyspark import SparkContext from pyspark.sql import SQLContext from pyspark.sql.functions import desc if __name__ == "__main__": if len(sys.argv) < 3: print("Usage: sort-cluster-issue.py <input> <output>", file=sys.stderr) exit(-1) sc = SparkContext(appName='resort data') sqlContext = SQLContext(sc) df = sqlContext.read.load(sys.argv[1]) df.registerTempTable("newspaper") df2 = sqlContext.sql('select series, date, count(*) as cnt from newspaper group by series, date') df3 = df.join(df2, ['series', 'date']) df3.sort(desc("cnt"), "series", "date", "id", "begin", "end")\ .write.option('compression', 'gzip').json(sys.argv[2]) sc.stop()
Use command-line arguments for paths.
Use command-line arguments for paths.
Python
apache-2.0
ViralTexts/vt-passim,ViralTexts/vt-passim,ViralTexts/vt-passim
python
## Code Before: from pyspark import SparkContext from pyspark.sql import SQLContext, Row from pyspark.sql.functions import asc, desc if __name__ == "__main__": sc = SparkContext(appName='resort data') sqlContext = SQLContext(sc) df = sqlContext.read.load('hdfs://discovery3:9000/tmp/dasmith/c19-20160919-a50-o08/pretty.parquet') #df = sqlContext.read.load('hdfs://discovery3:9000/tmp/dasmith/c19-20160402-a50-o08/out.parquet') df.registerTempTable("newspaper") df2 = sqlContext.sql("select series, date, count(*) as cnt from newspaper group by series, date order by cnt desc") df3 = df.join(df2, ['series', 'date']) df3.sort(desc("cnt"), asc("begin"), asc("end"))\ .write.json('/gss_gpfs_scratch/xu.shao/network/resorted-pretty.json') ## Instruction: Use command-line arguments for paths. ## Code After: from __future__ import print_function import sys from pyspark import SparkContext from pyspark.sql import SQLContext from pyspark.sql.functions import desc if __name__ == "__main__": if len(sys.argv) < 3: print("Usage: sort-cluster-issue.py <input> <output>", file=sys.stderr) exit(-1) sc = SparkContext(appName='resort data') sqlContext = SQLContext(sc) df = sqlContext.read.load(sys.argv[1]) df.registerTempTable("newspaper") df2 = sqlContext.sql('select series, date, count(*) as cnt from newspaper group by series, date') df3 = df.join(df2, ['series', 'date']) df3.sort(desc("cnt"), "series", "date", "id", "begin", "end")\ .write.option('compression', 'gzip').json(sys.argv[2]) sc.stop()
... from __future__ import print_function import sys from pyspark import SparkContext from pyspark.sql import SQLContext from pyspark.sql.functions import desc if __name__ == "__main__": if len(sys.argv) < 3: print("Usage: sort-cluster-issue.py <input> <output>", file=sys.stderr) exit(-1) sc = SparkContext(appName='resort data') sqlContext = SQLContext(sc) df = sqlContext.read.load(sys.argv[1]) df.registerTempTable("newspaper") df2 = sqlContext.sql('select series, date, count(*) as cnt from newspaper group by series, date') df3 = df.join(df2, ['series', 'date']) df3.sort(desc("cnt"), "series", "date", "id", "begin", "end")\ .write.option('compression', 'gzip').json(sys.argv[2]) sc.stop() ...
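Since this record switches the Spark job from hard-coded HDFS paths to positional arguments, a hypothetical invocation now passes the input parquet and output location explicitly. The paths below are placeholders, not taken from the commit, and `spark-submit` on the PATH is assumed:

```python
import subprocess

# Illustrative only: shows how the two positional arguments map onto sys.argv in the script.
subprocess.check_call([
    "spark-submit", "sort-cluster-issue.py",
    "hdfs:///tmp/clusters/pretty.parquet",  # read as sys.argv[1]
    "/scratch/resorted-pretty.json",        # written as gzip-compressed JSON via sys.argv[2]
])
```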
ad751d41700fac47cf818ba336a34bb316bde488
opacclient/libopac/src/main/java/de/geeksfactory/opacclient/utils/KotlinUtils.kt
opacclient/libopac/src/main/java/de/geeksfactory/opacclient/utils/KotlinUtils.kt
package de.geeksfactory.opacclient.utils import org.json.JSONObject import org.jsoup.Jsoup import org.jsoup.nodes.Document import org.jsoup.nodes.Element import org.jsoup.nodes.TextNode import org.jsoup.select.Elements val String.html: Document get() = Jsoup.parse(this) val String.jsonObject: JSONObject get() = JSONObject(this) operator fun Element.get(name: String): String = this.attr(name) val Element.text: String get() = this.text() val Elements.text: String get() = this.text() val TextNode.text: String get() = this.text()
package de.geeksfactory.opacclient.utils import org.json.JSONArray import org.json.JSONObject import org.jsoup.Jsoup import org.jsoup.nodes.Document import org.jsoup.nodes.Element import org.jsoup.nodes.TextNode import org.jsoup.select.Elements val String.html: Document get() = Jsoup.parse(this) val String.jsonObject: JSONObject get() = JSONObject(this) operator fun Element.get(name: String): String = this.attr(name) val Element.text: String get() = this.text() val Elements.text: String get() = this.text() val TextNode.text: String get() = this.text() // JSONArray extension functions inline fun <reified T, R> JSONArray.map(transform: (T) -> R): List<R> = (0..length()).map { i -> transform(get(i) as T) } inline fun <reified T> JSONArray.forEach(function: (T) -> Unit) = (0..length()).forEach { i -> function(get(i) as T) }
Add some Kotlin extension functions to simplify handling of JSONArrays
Add some Kotlin extension functions to simplify handling of JSONArrays
Kotlin
mit
opacapp/opacclient,opacapp/opacclient,opacapp/opacclient,opacapp/opacclient,raphaelm/opacclient,opacapp/opacclient,raphaelm/opacclient,raphaelm/opacclient
kotlin
## Code Before: package de.geeksfactory.opacclient.utils import org.json.JSONObject import org.jsoup.Jsoup import org.jsoup.nodes.Document import org.jsoup.nodes.Element import org.jsoup.nodes.TextNode import org.jsoup.select.Elements val String.html: Document get() = Jsoup.parse(this) val String.jsonObject: JSONObject get() = JSONObject(this) operator fun Element.get(name: String): String = this.attr(name) val Element.text: String get() = this.text() val Elements.text: String get() = this.text() val TextNode.text: String get() = this.text() ## Instruction: Add some Kotlin extension functions to simplify handling of JSONArrays ## Code After: package de.geeksfactory.opacclient.utils import org.json.JSONArray import org.json.JSONObject import org.jsoup.Jsoup import org.jsoup.nodes.Document import org.jsoup.nodes.Element import org.jsoup.nodes.TextNode import org.jsoup.select.Elements val String.html: Document get() = Jsoup.parse(this) val String.jsonObject: JSONObject get() = JSONObject(this) operator fun Element.get(name: String): String = this.attr(name) val Element.text: String get() = this.text() val Elements.text: String get() = this.text() val TextNode.text: String get() = this.text() // JSONArray extension functions inline fun <reified T, R> JSONArray.map(transform: (T) -> R): List<R> = (0..length()).map { i -> transform(get(i) as T) } inline fun <reified T> JSONArray.forEach(function: (T) -> Unit) = (0..length()).forEach { i -> function(get(i) as T) }
// ... existing code ... package de.geeksfactory.opacclient.utils import org.json.JSONArray import org.json.JSONObject import org.jsoup.Jsoup import org.jsoup.nodes.Document // ... modified code ... val TextNode.text: String get() = this.text() // JSONArray extension functions inline fun <reified T, R> JSONArray.map(transform: (T) -> R): List<R> = (0..length()).map { i -> transform(get(i) as T) } inline fun <reified T> JSONArray.forEach(function: (T) -> Unit) = (0..length()).forEach { i -> function(get(i) as T) } // ... rest of the code ...
6e76b51f5aa1c5ae54130f52e176195a992284aa
src/core/monkeypatch.py
src/core/monkeypatch.py
from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None args = [code] + args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url)
from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None if settings.URL_CONFIG == 'path' and not local_request.path.startswith('/admin/'): args = tuple([code] + [x for x in args]) else: args = args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url)
Update for janeway's monkey patch.
Update for janeway's monkey patch.
Python
agpl-3.0
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
python
## Code Before: from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None args = [code] + args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url) ## Instruction: Update for janeway's monkey patch. ## Code After: from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None if settings.URL_CONFIG == 'path' and not local_request.path.startswith('/admin/'): args = tuple([code] + [x for x in args]) else: args = args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url)
# ... existing code ... # Drop kwargs if we have args (most likely from the template if args: kwargs = None if settings.URL_CONFIG == 'path' and not local_request.path.startswith('/admin/'): args = tuple([code] + [x for x in args]) else: args = args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # ... rest of the code ...
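The kwargs/args juggling in the patched `reverse()` above is easiest to see in isolation. The sketch below restates just that branch logic as a standalone function, assuming `URL_CONFIG == 'path'` and a non-admin request path; the journal code and argument values are made up for illustration:

```python
def add_journal_code(kwargs, args, code, path):
    # Mirrors the branching in the patched reverse() for URL_CONFIG == 'path'.
    if kwargs and not args:
        kwargs['journal_code'] = code
    else:
        kwargs = {'journal_code': code}
    if path.startswith('/admin/'):
        kwargs.pop('journal_code')
    if args:
        kwargs = None
        args = tuple([code] + [x for x in args])
    return kwargs, args

print(add_journal_code({'article_id': 1}, None, 'demo', '/demo/articles/1/'))
# -> ({'article_id': 1, 'journal_code': 'demo'}, None)
print(add_journal_code(None, (1,), 'demo', '/demo/articles/1/'))
# -> (None, ('demo', 1))
```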
520fce9f16978e6c4a7ea80a342b3fb148e19a7c
mobile/src/main/java/com/github/triplesolitaire/GamePauseDialogFragment.java
mobile/src/main/java/com/github/triplesolitaire/GamePauseDialogFragment.java
package com.github.triplesolitaire; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.os.Bundle; /** * Dialog to show when a user pauses the game */ public class GamePauseDialogFragment extends DialogFragment { @Override public Dialog onCreateDialog(final Bundle savedInstanceState) { final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(R.string.app_name).setIcon(R.drawable.icon).setMessage(R.string.game_paused) .setNegativeButton(getText(R.string.resume), null); return builder.create(); } }
package com.github.triplesolitaire; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.os.Build; import android.os.Bundle; /** * Dialog to show when a user pauses the game */ public class GamePauseDialogFragment extends DialogFragment { @Override public Dialog onCreateDialog(final Bundle savedInstanceState) { final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { builder.setIcon(R.drawable.icon); } builder.setTitle(R.string.app_name).setMessage(R.string.game_paused) .setNegativeButton(getText(R.string.resume), null); return builder.create(); } }
Remove icon from Pause dialog on Android 5.0+ devices
Remove icon from Pause dialog on Android 5.0+ devices
Java
bsd-3-clause
ianhanniballake/TripleSolitaire
java
## Code Before: package com.github.triplesolitaire; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.os.Bundle; /** * Dialog to show when a user pauses the game */ public class GamePauseDialogFragment extends DialogFragment { @Override public Dialog onCreateDialog(final Bundle savedInstanceState) { final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(R.string.app_name).setIcon(R.drawable.icon).setMessage(R.string.game_paused) .setNegativeButton(getText(R.string.resume), null); return builder.create(); } } ## Instruction: Remove icon from Pause dialog on Android 5.0+ devices ## Code After: package com.github.triplesolitaire; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.os.Build; import android.os.Bundle; /** * Dialog to show when a user pauses the game */ public class GamePauseDialogFragment extends DialogFragment { @Override public Dialog onCreateDialog(final Bundle savedInstanceState) { final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { builder.setIcon(R.drawable.icon); } builder.setTitle(R.string.app_name).setMessage(R.string.game_paused) .setNegativeButton(getText(R.string.resume), null); return builder.create(); } }
... import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.os.Build; import android.os.Bundle; /** ... @Override public Dialog onCreateDialog(final Bundle savedInstanceState) { final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { builder.setIcon(R.drawable.icon); } builder.setTitle(R.string.app_name).setMessage(R.string.game_paused) .setNegativeButton(getText(R.string.resume), null); return builder.create(); } ...
b3766d951e4e574938c84f8dbd3b1d6b47739c86
src/test/java/org/buddycloud/channelserver/pubsub/model/impl/NodeAffiliationImplTest.java
src/test/java/org/buddycloud/channelserver/pubsub/model/impl/NodeAffiliationImplTest.java
package org.buddycloud.channelserver.pubsub.model.impl; import nl.jqno.equalsverifier.EqualsVerifier; import org.junit.Test; public class NodeAffiliationImplTest { @Test public void testEquals() { EqualsVerifier.forClass(NodeAffiliationImpl.class).verify(); } }
package org.buddycloud.channelserver.pubsub.model.impl; import java.util.Date; import junit.framework.Assert; import nl.jqno.equalsverifier.EqualsVerifier; import org.buddycloud.channelserver.pubsub.affiliation.Affiliations; import org.junit.Test; import org.xmpp.packet.JID; public class NodeAffiliationImplTest { private String node = "/user/[email protected]/posts"; private JID fullJid = new JID("[email protected]/resource"); @Test public void testEquals() { EqualsVerifier.forClass(NodeAffiliationImpl.class).verify(); } @Test public void providingFullJidSetsAsAsBareJid() throws Exception { NodeAffiliationImpl affiliation = new NodeAffiliationImpl(node, fullJid, Affiliations.member, new Date()); Assert.assertEquals(fullJid.toBareJID(), affiliation.getUser().toString()); } }
Increase coverage of NodeAffiliationImpl to 100%
Increase coverage of NodeAffiliationImpl to 100%
Java
apache-2.0
enom/buddycloud-server-java,webhost/buddycloud-server-java,buddycloud/buddycloud-server-java,surevine/buddycloud-server-java,surevine/buddycloud-server-java,ashward/buddycloud-server-java,enom/buddycloud-server-java,webhost/buddycloud-server-java,ashward/buddycloud-server-java,buddycloud/buddycloud-server-java
java
## Code Before: package org.buddycloud.channelserver.pubsub.model.impl; import nl.jqno.equalsverifier.EqualsVerifier; import org.junit.Test; public class NodeAffiliationImplTest { @Test public void testEquals() { EqualsVerifier.forClass(NodeAffiliationImpl.class).verify(); } } ## Instruction: Increase coverage of NodeAffiliationImpl to 100% ## Code After: package org.buddycloud.channelserver.pubsub.model.impl; import java.util.Date; import junit.framework.Assert; import nl.jqno.equalsverifier.EqualsVerifier; import org.buddycloud.channelserver.pubsub.affiliation.Affiliations; import org.junit.Test; import org.xmpp.packet.JID; public class NodeAffiliationImplTest { private String node = "/user/[email protected]/posts"; private JID fullJid = new JID("[email protected]/resource"); @Test public void testEquals() { EqualsVerifier.forClass(NodeAffiliationImpl.class).verify(); } @Test public void providingFullJidSetsAsAsBareJid() throws Exception { NodeAffiliationImpl affiliation = new NodeAffiliationImpl(node, fullJid, Affiliations.member, new Date()); Assert.assertEquals(fullJid.toBareJID(), affiliation.getUser().toString()); } }
// ... existing code ... package org.buddycloud.channelserver.pubsub.model.impl; import java.util.Date; import junit.framework.Assert; import nl.jqno.equalsverifier.EqualsVerifier; import org.buddycloud.channelserver.pubsub.affiliation.Affiliations; import org.junit.Test; import org.xmpp.packet.JID; public class NodeAffiliationImplTest { private String node = "/user/[email protected]/posts"; private JID fullJid = new JID("[email protected]/resource"); @Test public void testEquals() { EqualsVerifier.forClass(NodeAffiliationImpl.class).verify(); } @Test public void providingFullJidSetsAsAsBareJid() throws Exception { NodeAffiliationImpl affiliation = new NodeAffiliationImpl(node, fullJid, Affiliations.member, new Date()); Assert.assertEquals(fullJid.toBareJID(), affiliation.getUser().toString()); } } // ... rest of the code ...
ae49b56c52847aadda7be62071dcb33b5c22e6eb
Test/src/minunit.h
Test/src/minunit.h
/* * minunit.h * * Source: http://www.jera.com/techinfo/jtns/jtn002.html */ #include <stdio.h> extern int tests_run; #define mu_assert(message, test) do { \ if (!(test)) { \ return message; \ } \ } while (0) #define mu_run_test(test, name) do { \ test_head(name); \ char const * message = test(); \ tests_run++; \ if (message) { \ test_failure; \ printf(" * %s\n", message); \ } else { \ test_success; \ } \ } while (0) #define test_head(name) printf("Test %s... ", name) #define test_success printf("[OK]\n") #define test_failure printf("[FAIL]\n")
/* * minunit.h * * Source: http://www.jera.com/techinfo/jtns/jtn002.html */ #include <stdio.h> extern int tests_run; #define mu_assert(message, test) do { \ if (!(test)) { \ return message; \ } \ } while (0) #define mu_run_test(test, name) do { \ printf("Test %s... ", name); \ char const * message = test(); \ tests_run++; \ if (message) { \ if (message[0] != '\0') { \ printf("[FAIL]\n * %s\n", message); \ } else { \ printf("[OK]\n"); \ } \ } else { \ printf("\n"); \ } \ } while (0)
Remove useless unit testing definitions.
Remove useless unit testing definitions.
C
mit
AymericGenet/SPHINCS-arduinodue,AymericGenet/SPHINCS-arduinodue,AymericGenet/SPHINCS-arduinodue
c
## Code Before: /* * minunit.h * * Source: http://www.jera.com/techinfo/jtns/jtn002.html */ #include <stdio.h> extern int tests_run; #define mu_assert(message, test) do { \ if (!(test)) { \ return message; \ } \ } while (0) #define mu_run_test(test, name) do { \ test_head(name); \ char const * message = test(); \ tests_run++; \ if (message) { \ test_failure; \ printf(" * %s\n", message); \ } else { \ test_success; \ } \ } while (0) #define test_head(name) printf("Test %s... ", name) #define test_success printf("[OK]\n") #define test_failure printf("[FAIL]\n") ## Instruction: Remove useless unit testing definitions. ## Code After: /* * minunit.h * * Source: http://www.jera.com/techinfo/jtns/jtn002.html */ #include <stdio.h> extern int tests_run; #define mu_assert(message, test) do { \ if (!(test)) { \ return message; \ } \ } while (0) #define mu_run_test(test, name) do { \ printf("Test %s... ", name); \ char const * message = test(); \ tests_run++; \ if (message) { \ if (message[0] != '\0') { \ printf("[FAIL]\n * %s\n", message); \ } else { \ printf("[OK]\n"); \ } \ } else { \ printf("\n"); \ } \ } while (0)
# ... existing code ... } while (0) #define mu_run_test(test, name) do { \ printf("Test %s... ", name); \ char const * message = test(); \ tests_run++; \ if (message) { \ if (message[0] != '\0') { \ printf("[FAIL]\n * %s\n", message); \ } else { \ printf("[OK]\n"); \ } \ } else { \ printf("\n"); \ } \ } while (0) # ... rest of the code ...
776c1dbda3871c2b94d849ea59db25f93bb59525
src/mmw/apps/water_balance/views.py
src/mmw/apps/water_balance/views.py
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from django.shortcuts import render_to_response def home_page(request): return render_to_response('home_page/index.html')
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from django.shortcuts import render_to_response from django.template import RequestContext def home_page(request): return render_to_response('home_page/index.html', RequestContext(request))
Add RequestContext to Micro site
Add RequestContext to Micro site This allows us to populate settings variables such as Google Analytics codes. See original work done for #769. Refs #920
Python
apache-2.0
lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app
python
## Code Before: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from django.shortcuts import render_to_response def home_page(request): return render_to_response('home_page/index.html') ## Instruction: Add RequestContext to Micro site This allows us to populate settings variables such as Google Analytics codes. See original work done for #769. Refs #920 ## Code After: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from django.shortcuts import render_to_response from django.template import RequestContext def home_page(request): return render_to_response('home_page/index.html', RequestContext(request))
... from __future__ import division from django.shortcuts import render_to_response from django.template import RequestContext def home_page(request): return render_to_response('home_page/index.html', RequestContext(request)) ...
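Passing `RequestContext(request)` as above matters because it runs the configured template context processors, which is how settings-driven values (the commit message cites Google Analytics codes) reach the template. A hedged sketch of such a processor follows; the setting and key names are assumptions, not taken from the project:

```python
from django.conf import settings

def analytics(request):
    # Hypothetical context processor; GOOGLE_ANALYTICS_CODE is an illustrative setting name.
    return {'ga_code': getattr(settings, 'GOOGLE_ANALYTICS_CODE', '')}

# Once listed in the project's context-processor settings, {{ ga_code }} becomes available
# in home_page/index.html because the view now renders with RequestContext(request).
```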
b6fff4186de098946cc1e4c0204f78936f73044f
tests/basics/tuple1.py
tests/basics/tuple1.py
x = (1, 2, 3 * 4) print(x) try: x[0] = 4 except TypeError: print("TypeError") print(x) try: x.append(5) except AttributeError: print("AttributeError") print(x[1:]) print(x[:-1]) print(x[2:3]) print(x + (10, 100, 10000)) # construction of tuple from large iterator (tests implementation detail of uPy) print(tuple(range(20))) # unsupported unary operation try: +() except TypeError: print('TypeError') # unsupported type on RHS of add try: () + None except TypeError: print('TypeError')
x = (1, 2, 3 * 4) print(x) try: x[0] = 4 except TypeError: print("TypeError") print(x) try: x.append(5) except AttributeError: print("AttributeError") print(x[1:]) print(x[:-1]) print(x[2:3]) print(x + (10, 100, 10000)) # inplace add operator x += (10, 11, 12) print(x) # construction of tuple from large iterator (tests implementation detail of uPy) print(tuple(range(20))) # unsupported unary operation try: +() except TypeError: print('TypeError') # unsupported type on RHS of add try: () + None except TypeError: print('TypeError')
Add test for tuple inplace add.
tests/basics: Add test for tuple inplace add.
Python
mit
infinnovation/micropython,dmazzella/micropython,henriknelson/micropython,chrisdearman/micropython,deshipu/micropython,AriZuu/micropython,infinnovation/micropython,AriZuu/micropython,puuu/micropython,alex-robbins/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,chrisdearman/micropython,deshipu/micropython,micropython/micropython-esp32,trezor/micropython,pfalcon/micropython,dmazzella/micropython,AriZuu/micropython,cwyark/micropython,kerneltask/micropython,torwag/micropython,dmazzella/micropython,selste/micropython,alex-robbins/micropython,pozetroninc/micropython,pozetroninc/micropython,swegener/micropython,tralamazza/micropython,lowRISC/micropython,pramasoul/micropython,PappaPeppar/micropython,deshipu/micropython,cwyark/micropython,lowRISC/micropython,ryannathans/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,oopy/micropython,adafruit/micropython,chrisdearman/micropython,MrSurly/micropython,pramasoul/micropython,puuu/micropython,oopy/micropython,lowRISC/micropython,kerneltask/micropython,MrSurly/micropython-esp32,pozetroninc/micropython,ryannathans/micropython,infinnovation/micropython,swegener/micropython,torwag/micropython,bvernoux/micropython,selste/micropython,trezor/micropython,tobbad/micropython,blazewicz/micropython,adafruit/circuitpython,infinnovation/micropython,TDAbboud/micropython,torwag/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,HenrikSolver/micropython,AriZuu/micropython,micropython/micropython-esp32,blazewicz/micropython,cwyark/micropython,tralamazza/micropython,hiway/micropython,henriknelson/micropython,oopy/micropython,HenrikSolver/micropython,bvernoux/micropython,tobbad/micropython,hiway/micropython,deshipu/micropython,torwag/micropython,henriknelson/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,cwyark/micropython,blazewicz/micropython,micropython/micropython-esp32,cwyark/micropython,micropython/micropython-esp32,ryannathans/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,kerneltask/micropython,alex-robbins/micropython,deshipu/micropython,PappaPeppar/micropython,pfalcon/micropython,tobbad/micropython,pramasoul/micropython,MrSurly/micropython-esp32,hiway/micropython,ryannathans/micropython,swegener/micropython,kerneltask/micropython,selste/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,Timmenem/micropython,blazewicz/micropython,adafruit/circuitpython,Timmenem/micropython,MrSurly/micropython,Timmenem/micropython,adafruit/circuitpython,infinnovation/micropython,tralamazza/micropython,toolmacher/micropython,MrSurly/micropython,pozetroninc/micropython,adafruit/circuitpython,lowRISC/micropython,ryannathans/micropython,toolmacher/micropython,pfalcon/micropython,dmazzella/micropython,lowRISC/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,chrisdearman/micropython,AriZuu/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,TDAbboud/micropython,oopy/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tobbad/micropython,tobbad/micropython,selste/micropython,adafruit/micropython,HenrikSolver/micropython,puuu/micropython,puuu/micropython,trezor/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,chrisdearman/micropython,toolmacher/micropython,oopy/micropython,hiway/micropython,adafruit/micropython,toolmacher/micropython,alex-robbins/micropython,Timmenem/micropython,TDAbboud/micropython,alex-
robbins/micropython,pfalcon/micropython,pramasoul/micropython,bvernoux/micropython,henriknelson/micropython,trezor/micropython,TDAbboud/micropython,selste/micropython,adafruit/micropython,puuu/micropython,swegener/micropython,micropython/micropython-esp32,TDAbboud/micropython,MrSurly/micropython,swegener/micropython,HenrikSolver/micropython,pozetroninc/micropython,pfalcon/micropython,MrSurly/micropython-esp32,trezor/micropython,MrSurly/micropython,Timmenem/micropython,SHA2017-badge/micropython-esp32,toolmacher/micropython
python
## Code Before: x = (1, 2, 3 * 4) print(x) try: x[0] = 4 except TypeError: print("TypeError") print(x) try: x.append(5) except AttributeError: print("AttributeError") print(x[1:]) print(x[:-1]) print(x[2:3]) print(x + (10, 100, 10000)) # construction of tuple from large iterator (tests implementation detail of uPy) print(tuple(range(20))) # unsupported unary operation try: +() except TypeError: print('TypeError') # unsupported type on RHS of add try: () + None except TypeError: print('TypeError') ## Instruction: tests/basics: Add test for tuple inplace add. ## Code After: x = (1, 2, 3 * 4) print(x) try: x[0] = 4 except TypeError: print("TypeError") print(x) try: x.append(5) except AttributeError: print("AttributeError") print(x[1:]) print(x[:-1]) print(x[2:3]) print(x + (10, 100, 10000)) # inplace add operator x += (10, 11, 12) print(x) # construction of tuple from large iterator (tests implementation detail of uPy) print(tuple(range(20))) # unsupported unary operation try: +() except TypeError: print('TypeError') # unsupported type on RHS of add try: () + None except TypeError: print('TypeError')
... print(x + (10, 100, 10000)) # inplace add operator x += (10, 11, 12) print(x) # construction of tuple from large iterator (tests implementation detail of uPy) print(tuple(range(20))) ...
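One detail the new `x += (10, 11, 12)` line exercises: augmented assignment on a tuple does not mutate it — a fresh tuple is built and the name rebound, which is why the operator is legal on an immutable type. A plain CPython illustration, independent of the test harness:

```python
t = (1, 2, 3)
before = id(t)
t += (10, 11, 12)
print(t)                # (1, 2, 3, 10, 11, 12)
print(id(t) == before)  # False: += produced a new tuple object and rebound t
```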
ed491860864c363be36d99c09ff0131a5fe00aaf
test/Driver/Dependencies/Inputs/touch.py
test/Driver/Dependencies/Inputs/touch.py
import os import sys assert len(sys.argv) >= 2 timeVal = int(sys.argv[1]) # offset between Unix and LLVM epochs timeVal += 946684800 # Update the output file mtime, or create it if necessary. # From http://stackoverflow.com/a/1160227. for outputFile in sys.argv[1:]: with open(outputFile, 'a'): os.utime(outputFile, (timeVal, timeVal))
import os import sys assert len(sys.argv) >= 2 timeVal = int(sys.argv[1]) # Update the output file mtime, or create it if necessary. # From http://stackoverflow.com/a/1160227. for outputFile in sys.argv[1:]: with open(outputFile, 'a'): os.utime(outputFile, (timeVal, timeVal))
Fix tests for file timestamps to drop the LLVM epoch offset.
Fix tests for file timestamps to drop the LLVM epoch offset. Now that Swift is not using LLVM's TimeValue (564fc6f2 and previous commit) there is no offset from the system_clock epoch. The offset could be added into the tests that use touch.py (so the times would not be back in 1984) but I decided not to do that to avoid merge conflicts in the test files.
Python
apache-2.0
aschwaighofer/swift,tinysun212/swift-windows,arvedviehweger/swift,xwu/swift,airspeedswift/swift,parkera/swift,tinysun212/swift-windows,JGiola/swift,JaSpa/swift,JGiola/swift,parkera/swift,zisko/swift,hughbe/swift,codestergit/swift,CodaFi/swift,rudkx/swift,huonw/swift,tkremenek/swift,jtbandes/swift,codestergit/swift,practicalswift/swift,frootloops/swift,tardieu/swift,jckarter/swift,return/swift,benlangmuir/swift,xwu/swift,danielmartin/swift,djwbrown/swift,gribozavr/swift,return/swift,atrick/swift,arvedviehweger/swift,danielmartin/swift,nathawes/swift,tjw/swift,OscarSwanros/swift,shahmishal/swift,benlangmuir/swift,benlangmuir/swift,brentdax/swift,JGiola/swift,amraboelela/swift,roambotics/swift,tardieu/swift,tinysun212/swift-windows,jmgc/swift,parkera/swift,tardieu/swift,danielmartin/swift,gribozavr/swift,sschiau/swift,natecook1000/swift,huonw/swift,karwa/swift,gregomni/swift,harlanhaskins/swift,aschwaighofer/swift,practicalswift/swift,manavgabhawala/swift,glessard/swift,uasys/swift,gregomni/swift,allevato/swift,jckarter/swift,alblue/swift,natecook1000/swift,amraboelela/swift,xedin/swift,gottesmm/swift,hughbe/swift,harlanhaskins/swift,felix91gr/swift,return/swift,tkremenek/swift,shajrawi/swift,allevato/swift,sschiau/swift,sschiau/swift,stephentyrone/swift,tkremenek/swift,uasys/swift,JaSpa/swift,stephentyrone/swift,apple/swift,milseman/swift,lorentey/swift,shahmishal/swift,shajrawi/swift,atrick/swift,practicalswift/swift,xwu/swift,gribozavr/swift,return/swift,xedin/swift,hooman/swift,CodaFi/swift,gregomni/swift,apple/swift,austinzheng/swift,zisko/swift,milseman/swift,huonw/swift,jopamer/swift,jckarter/swift,benlangmuir/swift,stephentyrone/swift,xwu/swift,tardieu/swift,amraboelela/swift,swiftix/swift,CodaFi/swift,bitjammer/swift,danielmartin/swift,huonw/swift,JaSpa/swift,xedin/swift,austinzheng/swift,lorentey/swift,huonw/swift,deyton/swift,nathawes/swift,lorentey/swift,apple/swift,austinzheng/swift,harlanhaskins/swift,milseman/swift,glessard/swift,devincoughlin/swift,airspeedswift/swift,OscarSwanros/swift,glessard/swift,felix91gr/swift,OscarSwanros/swift,nathawes/swift,karwa/swift,gregomni/swift,tardieu/swift,felix91gr/swift,hooman/swift,apple/swift,atrick/swift,parkera/swift,gottesmm/swift,deyton/swift,aschwaighofer/swift,devincoughlin/swift,arvedviehweger/swift,bitjammer/swift,harlanhaskins/swift,gregomni/swift,felix91gr/swift,alblue/swift,shajrawi/swift,zisko/swift,jmgc/swift,tjw/swift,sschiau/swift,frootloops/swift,jtbandes/swift,xedin/swift,ahoppen/swift,xedin/swift,austinzheng/swift,JaSpa/swift,shahmishal/swift,atrick/swift,brentdax/swift,arvedviehweger/swift,brentdax/swift,tardieu/swift,JGiola/swift,xwu/swift,devincoughlin/swift,xwu/swift,calebd/swift,lorentey/swift,xedin/swift,jopamer/swift,atrick/swift,rudkx/swift,aschwaighofer/swift,jtbandes/swift,natecook1000/swift,hooman/swift,natecook1000/swift,tinysun212/swift-windows,uasys/swift,danielmartin/swift,airspeedswift/swift,brentdax/swift,swiftix/swift,devincoughlin/swift,natecook1000/swift,hooman/swift,codestergit/swift,tjw/swift,Jnosh/swift,swiftix/swift,gribozavr/swift,JGiola/swift,Jnosh/swift,harlanhaskins/swift,gottesmm/swift,hughbe/swift,tkremenek/swift,CodaFi/swift,hooman/swift,alblue/swift,karwa/swift,brentdax/swift,sschiau/swift,bitjammer/swift,uasys/swift,sschiau/swift,allevato/swift,return/swift,austinzheng/swift,gregomni/swift,arvedviehweger/swift,uasys/swift,zisko/swift,stephentyrone/swift,swiftix/swift,uasys/swift,shajrawi/swift,CodaFi/swift,lorentey/swift,hughbe/swift,bitjammer/swift,austinzheng/swift,tkremenek/swift,devinc
oughlin/swift,stephentyrone/swift,aschwaighofer/swift,frootloops/swift,xwu/swift,manavgabhawala/swift,manavgabhawala/swift,manavgabhawala/swift,Jnosh/swift,bitjammer/swift,bitjammer/swift,OscarSwanros/swift,karwa/swift,shahmishal/swift,hughbe/swift,milseman/swift,atrick/swift,hooman/swift,austinzheng/swift,glessard/swift,JGiola/swift,danielmartin/swift,parkera/swift,parkera/swift,return/swift,zisko/swift,glessard/swift,milseman/swift,alblue/swift,tkremenek/swift,tardieu/swift,rudkx/swift,jmgc/swift,shajrawi/swift,frootloops/swift,calebd/swift,tjw/swift,karwa/swift,ahoppen/swift,allevato/swift,tjw/swift,frootloops/swift,sschiau/swift,CodaFi/swift,felix91gr/swift,shahmishal/swift,aschwaighofer/swift,arvedviehweger/swift,codestergit/swift,JaSpa/swift,stephentyrone/swift,jopamer/swift,OscarSwanros/swift,apple/swift,apple/swift,jckarter/swift,djwbrown/swift,practicalswift/swift,deyton/swift,jopamer/swift,parkera/swift,harlanhaskins/swift,jmgc/swift,roambotics/swift,djwbrown/swift,gribozavr/swift,benlangmuir/swift,manavgabhawala/swift,parkera/swift,huonw/swift,amraboelela/swift,jtbandes/swift,tinysun212/swift-windows,shajrawi/swift,felix91gr/swift,roambotics/swift,ahoppen/swift,airspeedswift/swift,jtbandes/swift,djwbrown/swift,ahoppen/swift,tinysun212/swift-windows,devincoughlin/swift,tinysun212/swift-windows,JaSpa/swift,swiftix/swift,benlangmuir/swift,jtbandes/swift,amraboelela/swift,codestergit/swift,uasys/swift,milseman/swift,devincoughlin/swift,felix91gr/swift,lorentey/swift,OscarSwanros/swift,codestergit/swift,calebd/swift,Jnosh/swift,tjw/swift,rudkx/swift,manavgabhawala/swift,JaSpa/swift,zisko/swift,xedin/swift,jopamer/swift,allevato/swift,rudkx/swift,roambotics/swift,alblue/swift,rudkx/swift,lorentey/swift,shajrawi/swift,practicalswift/swift,jckarter/swift,stephentyrone/swift,natecook1000/swift,gribozavr/swift,calebd/swift,tjw/swift,practicalswift/swift,arvedviehweger/swift,gottesmm/swift,nathawes/swift,shahmishal/swift,Jnosh/swift,Jnosh/swift,hughbe/swift,nathawes/swift,practicalswift/swift,jmgc/swift,danielmartin/swift,jckarter/swift,devincoughlin/swift,shahmishal/swift,bitjammer/swift,shahmishal/swift,jtbandes/swift,jopamer/swift,airspeedswift/swift,gribozavr/swift,nathawes/swift,swiftix/swift,ahoppen/swift,djwbrown/swift,manavgabhawala/swift,hooman/swift,Jnosh/swift,codestergit/swift,gottesmm/swift,aschwaighofer/swift,sschiau/swift,calebd/swift,natecook1000/swift,roambotics/swift,allevato/swift,zisko/swift,karwa/swift,glessard/swift,airspeedswift/swift,gottesmm/swift,huonw/swift,djwbrown/swift,harlanhaskins/swift,hughbe/swift,practicalswift/swift,brentdax/swift,jopamer/swift,gottesmm/swift,return/swift,swiftix/swift,alblue/swift,frootloops/swift,amraboelela/swift,airspeedswift/swift,calebd/swift,karwa/swift,deyton/swift,allevato/swift,nathawes/swift,brentdax/swift,milseman/swift,tkremenek/swift,deyton/swift,gribozavr/swift,djwbrown/swift,OscarSwanros/swift,shajrawi/swift,lorentey/swift,jmgc/swift,xedin/swift,amraboelela/swift,jmgc/swift,frootloops/swift,CodaFi/swift,jckarter/swift,roambotics/swift,calebd/swift,alblue/swift,karwa/swift,deyton/swift,ahoppen/swift,deyton/swift
python
## Code Before: import os import sys assert len(sys.argv) >= 2 timeVal = int(sys.argv[1]) # offset between Unix and LLVM epochs timeVal += 946684800 # Update the output file mtime, or create it if necessary. # From http://stackoverflow.com/a/1160227. for outputFile in sys.argv[1:]: with open(outputFile, 'a'): os.utime(outputFile, (timeVal, timeVal)) ## Instruction: Fix tests for file timestamps to drop the LLVM epoch offset. Now that Swift is not using LLVM's TimeValue (564fc6f2 and previous commit) there is no offset from the system_clock epoch. The offset could be added into the tests that use touch.py (so the times would not be back in 1984) but I decided not to do that to avoid merge conflicts in the test files. ## Code After: import os import sys assert len(sys.argv) >= 2 timeVal = int(sys.argv[1]) # Update the output file mtime, or create it if necessary. # From http://stackoverflow.com/a/1160227. for outputFile in sys.argv[1:]: with open(outputFile, 'a'): os.utime(outputFile, (timeVal, timeVal))
... assert len(sys.argv) >= 2 timeVal = int(sys.argv[1]) # Update the output file mtime, or create it if necessary. # From http://stackoverflow.com/a/1160227. for outputFile in sys.argv[1:]: ...
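For context on the deleted constant: 946684800 seconds after the Unix epoch is exactly 2000-01-01 00:00:00 UTC, so removing it means the value from `sys.argv[1]` reaches `os.utime` unshifted. A quick check — the second timestamp is illustrative, not necessarily one the Swift tests actually use:

```python
from datetime import datetime, timezone

print(datetime.fromtimestamp(946684800, tz=timezone.utc))  # 2000-01-01 00:00:00+00:00
print(datetime.fromtimestamp(441763200, tz=timezone.utc))  # 1984-01-01 00:00:00+00:00
# Without the removed offset a small test value stays in 1984 (as the commit message notes);
# with it, the same value would have been stamped roughly thirty years later.
```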
88bd75c4b0e039c208a1471d84006cdfb4bbaf93
starbowmodweb/site/templatetags/bbformat.py
starbowmodweb/site/templatetags/bbformat.py
from django import template import bbcode def bbcode_img(tag_name, value, options, parent, context): if tag_name in options and 'x' in options[tag_name]: options['width'], options['height'] = options[tag_name].split('x', 1) del options[tag_name] attrs = ' '.join([name+'="{}"' for name in options.keys()]) return ('<img src="{}" '+attrs+' />').format(value, *options.values()) bbcode_parser = bbcode.Parser() bbcode_parser.add_formatter("img", bbcode_img, replace_links=False) def bbformat(value): return bbcode_parser.format(value) register = template.Library() register.filter('bbformat', bbformat)
from django import template import bbcode def bbcode_img(tag_name, value, options, parent, context): if tag_name in options and 'x' in options[tag_name]: options['width'], options['height'] = options[tag_name].split('x', 1) del options[tag_name] attrs = ' '.join([name+'="{}"' for name in options.keys()]) return ('<img src="{}" '+attrs+' />').format(value, *options.values()) def bbcode_email(tag_name, value, options, parent, context): return '<a href="mailto:{}">{}</a>'.format(value, value) def bbcode_font(tag_name, value, options, parent, context): return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value) bbcode_parser = bbcode.Parser() bbcode_parser.add_formatter("img", bbcode_img, replace_links=False) bbcode_parser.add_formatter("email", bbcode_email) bbcode_parser.add_formatter("font", bbcode_font) def bbformat(value): return bbcode_parser.format(value) register = template.Library() register.filter('bbformat', bbformat)
Add support for email and font bbcode tags.
Add support for email and font bbcode tags.
Python
mit
Starbow/StarbowWebSite,Starbow/StarbowWebSite,Starbow/StarbowWebSite
python
## Code Before: from django import template import bbcode def bbcode_img(tag_name, value, options, parent, context): if tag_name in options and 'x' in options[tag_name]: options['width'], options['height'] = options[tag_name].split('x', 1) del options[tag_name] attrs = ' '.join([name+'="{}"' for name in options.keys()]) return ('<img src="{}" '+attrs+' />').format(value, *options.values()) bbcode_parser = bbcode.Parser() bbcode_parser.add_formatter("img", bbcode_img, replace_links=False) def bbformat(value): return bbcode_parser.format(value) register = template.Library() register.filter('bbformat', bbformat) ## Instruction: Add support for email and font bbcode tags. ## Code After: from django import template import bbcode def bbcode_img(tag_name, value, options, parent, context): if tag_name in options and 'x' in options[tag_name]: options['width'], options['height'] = options[tag_name].split('x', 1) del options[tag_name] attrs = ' '.join([name+'="{}"' for name in options.keys()]) return ('<img src="{}" '+attrs+' />').format(value, *options.values()) def bbcode_email(tag_name, value, options, parent, context): return '<a href="mailto:{}">{}</a>'.format(value, value) def bbcode_font(tag_name, value, options, parent, context): return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value) bbcode_parser = bbcode.Parser() bbcode_parser.add_formatter("img", bbcode_img, replace_links=False) bbcode_parser.add_formatter("email", bbcode_email) bbcode_parser.add_formatter("font", bbcode_font) def bbformat(value): return bbcode_parser.format(value) register = template.Library() register.filter('bbformat', bbformat)
# ... existing code ... return ('<img src="{}" '+attrs+' />').format(value, *options.values()) def bbcode_email(tag_name, value, options, parent, context): return '<a href="mailto:{}">{}</a>'.format(value, value) def bbcode_font(tag_name, value, options, parent, context): return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value) bbcode_parser = bbcode.Parser() bbcode_parser.add_formatter("img", bbcode_img, replace_links=False) bbcode_parser.add_formatter("email", bbcode_email) bbcode_parser.add_formatter("font", bbcode_font) def bbformat(value): # ... rest of the code ...
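A minimal usage sketch for the two formatters introduced above, using the same `bbcode` package API the module already relies on. The sample markup, the email address, and the rendered string are illustrative, and the exact output may vary with the parser's escaping and link-replacement defaults:

```python
import bbcode

def bbcode_email(tag_name, value, options, parent, context):
    return '<a href="mailto:{}">{}</a>'.format(value, value)

def bbcode_font(tag_name, value, options, parent, context):
    return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)

parser = bbcode.Parser()
parser.add_formatter("email", bbcode_email)
parser.add_formatter("font", bbcode_font)

print(parser.format("[font=monospace]mail me: [email][email protected][/email][/font]"))
# roughly: <span style="font-family: monospace">mail me:
#          <a href="mailto:[email protected]">[email protected]</a></span>
```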
d156beeaf0638e585c616d697e1ecd76a98d8a3f
axelrod/tests/test_reflex.py
axelrod/tests/test_reflex.py
import axelrod from test_player import TestPlayer class Reflex_test(TestPlayer): def test_initial_nice_strategy(self): """ First response should always be cooperation. """ p1 = axelrod.Reflex() p2 = axelrod.Player() self.assertEqual(p1.strategy(p2), 'C') def test_representation(self): """ How do we appear? """ p1 = axelrod.Reflex() self.assertEqual(str(p1), "Reflex") def test_reset_method(self): """ Does self.reset() reset the self? """ p1 = axelrod.Reflex() p1.history = ['C', 'D', 'C', 'C'] p1.reset() self.assertEqual(p1.history, []) self.assertEqual(p1.response, 'C') def test_stochastic(self): """ We are not stochastic. """ self.assertFalse(axelrod.Reflex().stochastic)
import axelrod from test_player import TestPlayer class Reflex_test(TestPlayer): name = "Reflex" player = axelrod.Reflex stochastic = False def test_strategy(self): """ First response should always be cooperation. """ p1 = axelrod.Reflex() p2 = axelrod.Player() self.assertEqual(p1.strategy(p2), 'C') def test_reset_method(self): """ Does self.reset() reset the self? """ p1 = axelrod.Reflex() p1.history = ['C', 'D', 'C', 'C'] p1.reset() self.assertEqual(p1.history, []) self.assertEqual(p1.response, 'C')
Simplify tests to new format.
Simplify tests to new format.
Python
mit
marcharper/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod
python
## Code Before: import axelrod from test_player import TestPlayer class Reflex_test(TestPlayer): def test_initial_nice_strategy(self): """ First response should always be cooperation. """ p1 = axelrod.Reflex() p2 = axelrod.Player() self.assertEqual(p1.strategy(p2), 'C') def test_representation(self): """ How do we appear? """ p1 = axelrod.Reflex() self.assertEqual(str(p1), "Reflex") def test_reset_method(self): """ Does self.reset() reset the self? """ p1 = axelrod.Reflex() p1.history = ['C', 'D', 'C', 'C'] p1.reset() self.assertEqual(p1.history, []) self.assertEqual(p1.response, 'C') def test_stochastic(self): """ We are not stochastic. """ self.assertFalse(axelrod.Reflex().stochastic) ## Instruction: Simplify tests to new format. ## Code After: import axelrod from test_player import TestPlayer class Reflex_test(TestPlayer): name = "Reflex" player = axelrod.Reflex stochastic = False def test_strategy(self): """ First response should always be cooperation. """ p1 = axelrod.Reflex() p2 = axelrod.Player() self.assertEqual(p1.strategy(p2), 'C') def test_reset_method(self): """ Does self.reset() reset the self? """ p1 = axelrod.Reflex() p1.history = ['C', 'D', 'C', 'C'] p1.reset() self.assertEqual(p1.history, []) self.assertEqual(p1.response, 'C')
... class Reflex_test(TestPlayer): name = "Reflex" player = axelrod.Reflex stochastic = False def test_strategy(self): """ First response should always be cooperation. """ p1 = axelrod.Reflex() p2 = axelrod.Player() self.assertEqual(p1.strategy(p2), 'C') def test_reset_method(self): ... p1.reset() self.assertEqual(p1.history, []) self.assertEqual(p1.response, 'C') ...
2990be32e0134ebed4be054f55ac8f3d8fda412c
RosettaCore/src/main/java/com/hubspot/rosetta/internal/RosettaModule.java
RosettaCore/src/main/java/com/hubspot/rosetta/internal/RosettaModule.java
package com.hubspot.rosetta.internal; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider; @SuppressWarnings("serial") public class RosettaModule extends Module { @Override public String getModuleName() { return "RosettaModule"; } @Override public Version version() { return Version.unknownVersion(); } @Override public void setupModule(SetupContext context) { context.addBeanSerializerModifier(new StoredAsJsonBeanSerializerModifier()); ObjectCodec codec = context.getOwner(); if (codec instanceof ObjectMapper) { ObjectMapper mapper = (ObjectMapper) codec; context.insertAnnotationIntrospector(new RosettaAnnotationIntrospector(mapper)); mapper.setSerializerProvider(new DefaultSerializerProvider.Impl()); mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } } }
package com.hubspot.rosetta.internal; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider; @SuppressWarnings("serial") public class RosettaModule extends Module { @Override public String getModuleName() { return "RosettaModule"; } @Override public Version version() { return Version.unknownVersion(); } @Override public void setupModule(SetupContext context) { context.addBeanSerializerModifier(new StoredAsJsonBeanSerializerModifier()); ObjectCodec codec = context.getOwner(); if (codec instanceof ObjectMapper) { ObjectMapper mapper = (ObjectMapper) codec; context.insertAnnotationIntrospector(new RosettaAnnotationIntrospector(mapper)); mapper.setSerializerProvider(new DefaultSerializerProvider.Impl()); mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(Include.ALWAYS); } } }
Set this in addition to annotation introspector approach
Set this in addition to annotation introspector approach
Java
apache-2.0
HubSpot/Rosetta
java
## Code Before: package com.hubspot.rosetta.internal; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider; @SuppressWarnings("serial") public class RosettaModule extends Module { @Override public String getModuleName() { return "RosettaModule"; } @Override public Version version() { return Version.unknownVersion(); } @Override public void setupModule(SetupContext context) { context.addBeanSerializerModifier(new StoredAsJsonBeanSerializerModifier()); ObjectCodec codec = context.getOwner(); if (codec instanceof ObjectMapper) { ObjectMapper mapper = (ObjectMapper) codec; context.insertAnnotationIntrospector(new RosettaAnnotationIntrospector(mapper)); mapper.setSerializerProvider(new DefaultSerializerProvider.Impl()); mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } } } ## Instruction: Set this in addition to annotation introspector approach ## Code After: package com.hubspot.rosetta.internal; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider; @SuppressWarnings("serial") public class RosettaModule extends Module { @Override public String getModuleName() { return "RosettaModule"; } @Override public Version version() { return Version.unknownVersion(); } @Override public void setupModule(SetupContext context) { context.addBeanSerializerModifier(new StoredAsJsonBeanSerializerModifier()); ObjectCodec codec = context.getOwner(); if (codec instanceof ObjectMapper) { ObjectMapper mapper = (ObjectMapper) codec; context.insertAnnotationIntrospector(new RosettaAnnotationIntrospector(mapper)); mapper.setSerializerProvider(new DefaultSerializerProvider.Impl()); mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(Include.ALWAYS); } } }
... package com.hubspot.rosetta.internal; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationFeature; ... mapper.setSerializerProvider(new DefaultSerializerProvider.Impl()); mapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(Include.ALWAYS); } } } ...
5cc071958aa63f46ec7f3708648f80a8424c661b
Lib/compositor/cmap.py
Lib/compositor/cmap.py
def extractCMAP(ttFont): cmap = {} cmapIDs = [(3, 10), (0, 3), (3, 1)] for i in range(len(cmapIDs)): if ttFont["cmap"].getcmap(*cmapIDs[i]): cmap = ttFont["cmap"].getcmap(*cmapIDs[i]).cmap break if not cmap: from compositor.error import CompositorError raise CompositorError("Found neither CMAP (3, 10), (0, 3), nor (3, 1) in font.") return cmap def reverseCMAP(cmap): reversed = {} for value, name in cmap.items(): if name not in reversed: reversed[name] = [] reversed[name].append(value) return reversed
def extractCMAP(ttFont): for platformID, encodingID in [(3, 10), (0, 3), (3, 1)]: cmapSubtable = ttFont["cmap"].getcmap(platformID, encodingID) if cmapSubtable is not None: return cmapSubtable.cmap from compositor.error import CompositorError raise CompositorError("Found neither CMAP (3, 10), (0, 3), nor (3, 1) in font.") def reverseCMAP(cmap): reversed = {} for value, name in cmap.items(): if name not in reversed: reversed[name] = [] reversed[name].append(value) return reversed
Make the code more compact
Make the code more compact
Python
mit
typesupply/compositor,anthrotype/compositor,anthrotype/compositor,typesupply/compositor
python
## Code Before: def extractCMAP(ttFont): cmap = {} cmapIDs = [(3, 10), (0, 3), (3, 1)] for i in range(len(cmapIDs)): if ttFont["cmap"].getcmap(*cmapIDs[i]): cmap = ttFont["cmap"].getcmap(*cmapIDs[i]).cmap break if not cmap: from compositor.error import CompositorError raise CompositorError("Found neither CMAP (3, 10), (0, 3), nor (3, 1) in font.") return cmap def reverseCMAP(cmap): reversed = {} for value, name in cmap.items(): if name not in reversed: reversed[name] = [] reversed[name].append(value) return reversed ## Instruction: Make the code more compact ## Code After: def extractCMAP(ttFont): for platformID, encodingID in [(3, 10), (0, 3), (3, 1)]: cmapSubtable = ttFont["cmap"].getcmap(platformID, encodingID) if cmapSubtable is not None: return cmapSubtable.cmap from compositor.error import CompositorError raise CompositorError("Found neither CMAP (3, 10), (0, 3), nor (3, 1) in font.") def reverseCMAP(cmap): reversed = {} for value, name in cmap.items(): if name not in reversed: reversed[name] = [] reversed[name].append(value) return reversed
# ... existing code ... def extractCMAP(ttFont): for platformID, encodingID in [(3, 10), (0, 3), (3, 1)]: cmapSubtable = ttFont["cmap"].getcmap(platformID, encodingID) if cmapSubtable is not None: return cmapSubtable.cmap from compositor.error import CompositorError raise CompositorError("Found neither CMAP (3, 10), (0, 3), nor (3, 1) in font.") def reverseCMAP(cmap): reversed = {} # ... rest of the code ...
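To make the reverse mapping concrete, here is a tiny self-contained run of `reverseCMAP`. The function body is copied verbatim from the module above so the snippet runs standalone; the code points and glyph names are arbitrary, and the printed dict order assumes Python 3.7+ insertion order:

```python
def reverseCMAP(cmap):
    # verbatim copy of the helper above, repeated here so the snippet runs standalone
    reversed = {}
    for value, name in cmap.items():
        if name not in reversed:
            reversed[name] = []
        reversed[name].append(value)
    return reversed

print(reverseCMAP({0x41: "A", 0x0391: "A", 0x42: "B"}))
# -> {'A': [65, 913], 'B': [66]}   (two code points collapse onto one glyph name)
```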
5e72bf8392bc4844ba5abcd49eb63c55d19d4657
applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt
applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt
package com.example.billing.reocurringPayments import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController @RestController class Controller { @Autowired private lateinit var paymentGateway: com.example.payments.Gateway @Autowired private lateinit var counter: CounterService @Autowired private lateinit var service: Service @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
package com.example.billing.reocurringPayments import com.example.payments.Gateway import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController import javax.inject.Inject @RestController class Controller { private val paymentGateway: com.example.payments.Gateway private val counter: CounterService private val service: Service @Inject constructor(paymentGateway: Gateway, counterService: CounterService, service: Service) { this.paymentGateway = paymentGateway this.counter = counterService this.service = service } @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
Use constructor injection so you can make instance variables vals
Use constructor injection so you can make instance variables vals
Kotlin
mit
mikegehard/user-management-evolution-kotlin,mikegehard/user-management-evolution-kotlin,mikegehard/user-management-evolution-kotlin
kotlin
## Code Before: package com.example.billing.reocurringPayments import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController @RestController class Controller { @Autowired private lateinit var paymentGateway: com.example.payments.Gateway @Autowired private lateinit var counter: CounterService @Autowired private lateinit var service: Service @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } } ## Instruction: Use constructor injection so you can make instance variables vals ## Code After: package com.example.billing.reocurringPayments import com.example.payments.Gateway import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController import javax.inject.Inject @RestController class Controller { private val paymentGateway: com.example.payments.Gateway private val counter: CounterService private val service: Service @Inject constructor(paymentGateway: Gateway, counterService: CounterService, service: Service) { this.paymentGateway = paymentGateway this.counter = counterService this.service = service } @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
... package com.example.billing.reocurringPayments import com.example.payments.Gateway import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus ... import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController import javax.inject.Inject @RestController class Controller { private val paymentGateway: com.example.payments.Gateway private val counter: CounterService private val service: Service @Inject constructor(paymentGateway: Gateway, counterService: CounterService, service: Service) { this.paymentGateway = paymentGateway this.counter = counterService this.service = service } @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { ...
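The commit above replaces Spring field injection (`@Autowired lateinit var`) with constructor injection so the collaborators can be immutable `val`s. The pattern itself is framework-agnostic; below is a minimal, hypothetical Python sketch of the same idea (dependencies handed in through the constructor and assigned exactly once), with placeholder names rather than the real Spring beans.

```python
class PaymentGateway:
    """Placeholder collaborator standing in for a real payment gateway."""
    def create_recurring_payment(self, amount):
        return amount > 0


class Controller:
    # Constructor injection: every dependency arrives as a constructor
    # argument and is assigned exactly once -- the analogue of Kotlin `val`.
    def __init__(self, payment_gateway, counter):
        self._payment_gateway = payment_gateway
        self._counter = counter

    def create_recurring_payment(self, amount):
        if self._payment_gateway.create_recurring_payment(amount):
            self._counter.append("billing.recurringPayment.created")
            return '{"errors": []}'
        return '{"errors": ["error1", "error2"]}'


# Wiring happens at the composition root, not via field injection.
controller = Controller(PaymentGateway(), counter=[])
print(controller.create_recurring_payment(10))   # {"errors": []}
```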
9ce80d4b4a27e5a32504c6b00ffcff846c53a649
froide/publicbody/widgets.py
froide/publicbody/widgets.py
import json from django import forms from .models import PublicBody class PublicBodySelect(forms.Widget): input_type = "text" template_name = 'publicbody/_chooser.html' initial_search = None class Media: extend = False js = ('js/publicbody.js',) def set_initial_search(self, search): self.initial_search = search def get_context(self, name, value=None, attrs=None): pb, pb_desc = None, None if value is not None: try: pb = PublicBody.objects.get(pk=int(value)) pb_desc = pb.get_label() except (ValueError, PublicBody.DoesNotExist): pass context = super().get_context(name, value, attrs) context['widget'].update({ 'value_label': pb_desc, 'search': self.initial_search, 'publicbody': pb, 'json': json.dumps({ 'fields': { name: { 'value': value, 'objects': pb.as_data() if pb is not None else None } } }) }) return context
import json from django import forms from .models import PublicBody class PublicBodySelect(forms.Widget): input_type = "text" template_name = 'publicbody/_chooser.html' initial_search = None class Media: extend = False js = ('js/publicbody.js',) def set_initial_search(self, search): self.initial_search = search def get_context(self, name, value=None, attrs=None): pb, pb_desc = None, None if value is not None: try: pb = PublicBody.objects.get(pk=int(value)) pb_desc = pb.get_label() except (ValueError, PublicBody.DoesNotExist): pass context = super(PublicBodySelect, self).get_context(name, value, attrs) context['widget'].update({ 'value_label': pb_desc, 'search': self.initial_search, 'publicbody': pb, 'json': json.dumps({ 'fields': { name: { 'value': value, 'objects': pb.as_data() if pb is not None else None } } }) }) return context
Fix super call for Python 2.7
Fix super call for Python 2.7
Python
mit
fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide
python
## Code Before: import json from django import forms from .models import PublicBody class PublicBodySelect(forms.Widget): input_type = "text" template_name = 'publicbody/_chooser.html' initial_search = None class Media: extend = False js = ('js/publicbody.js',) def set_initial_search(self, search): self.initial_search = search def get_context(self, name, value=None, attrs=None): pb, pb_desc = None, None if value is not None: try: pb = PublicBody.objects.get(pk=int(value)) pb_desc = pb.get_label() except (ValueError, PublicBody.DoesNotExist): pass context = super().get_context(name, value, attrs) context['widget'].update({ 'value_label': pb_desc, 'search': self.initial_search, 'publicbody': pb, 'json': json.dumps({ 'fields': { name: { 'value': value, 'objects': pb.as_data() if pb is not None else None } } }) }) return context ## Instruction: Fix super call for Python 2.7 ## Code After: import json from django import forms from .models import PublicBody class PublicBodySelect(forms.Widget): input_type = "text" template_name = 'publicbody/_chooser.html' initial_search = None class Media: extend = False js = ('js/publicbody.js',) def set_initial_search(self, search): self.initial_search = search def get_context(self, name, value=None, attrs=None): pb, pb_desc = None, None if value is not None: try: pb = PublicBody.objects.get(pk=int(value)) pb_desc = pb.get_label() except (ValueError, PublicBody.DoesNotExist): pass context = super(PublicBodySelect, self).get_context(name, value, attrs) context['widget'].update({ 'value_label': pb_desc, 'search': self.initial_search, 'publicbody': pb, 'json': json.dumps({ 'fields': { name: { 'value': value, 'objects': pb.as_data() if pb is not None else None } } }) }) return context
... pb_desc = pb.get_label() except (ValueError, PublicBody.DoesNotExist): pass context = super(PublicBodySelect, self).get_context(name, value, attrs) context['widget'].update({ 'value_label': pb_desc, 'search': self.initial_search, ...
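The fix above swaps the zero-argument `super().get_context(...)`, which only exists on Python 3, for the explicit `super(PublicBodySelect, self)` form that Python 2.7 also understands. A tiny standalone sketch of the two spellings, using made-up class names rather than the Django widget:

```python
class Base(object):
    def get_context(self, name):
        return {"widget": {"name": name}}


class Widget(Base):
    def get_context(self, name):
        # Works on both Python 2.7 and Python 3: class and instance are explicit.
        context = super(Widget, self).get_context(name)
        # On Python 3 only, the shorter `super().get_context(name)` is equivalent.
        context["widget"]["value_label"] = None
        return context


print(Widget().get_context("publicbody"))
```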
54d55ada152338cc038a4249e03ee25c4739c68f
python/sum-of-multiples/sum_of_multiples.py
python/sum-of-multiples/sum_of_multiples.py
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
Refactor to use list comprehension
Refactor to use list comprehension
Python
mit
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
python
## Code Before: def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples ## Instruction: Refactor to use list comprehension ## Code After: def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
... if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0] ...
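The refactor above folds an accumulate-in-a-loop body into a single list comprehension. For illustration, both spellings below produce the same multiples and can be checked against each other:

```python
limit, factor = 20, 3

# Loop form: build the list by appending inside an explicit for loop.
multiples_loop = []
for i in range(limit):
    if i % factor == 0:
        multiples_loop.append(i)

# Comprehension form: the same filter in one expression.
multiples_comp = [i for i in range(limit) if i % factor == 0]

assert multiples_loop == multiples_comp
print(multiples_comp)  # [0, 3, 6, 9, 12, 15, 18]
```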
08d8a5ec7d018c771787bd33b429c2b2d096a578
AAChartKit/ChartsDemo/SecondViewController.h
AAChartKit/ChartsDemo/SecondViewController.h
// // ViewController.h // AAChartKit // // Created by An An on 17/3/13. // Copyright © 2017年 An An. All rights reserved. // source code ----*** https://github.com/AAChartModel/AAChartKit ***--- source code // #import <UIKit/UIKit.h> typedef NS_ENUM(NSInteger,ENUM_secondeViewController_chartType){ ENUM_secondeViewController_chartTypeColumn =0, ENUM_secondeViewController_chartTypeBar, ENUM_secondeViewController_chartTypeArea, ENUM_secondeViewController_chartTypeAreaspline, ENUM_secondeViewController_chartTypeLine, ENUM_secondeViewController_chartTypeSpline, ENUM_secondeViewController_chartTypeScatter, }; @interface SecondViewController : UIViewController @property(nonatomic,assign)NSInteger ENUM_secondeViewController_chartType; @property(nonatomic,copy)NSString *receivedChartType; @end
// // ViewController.h // AAChartKit // // Created by An An on 17/3/13. // Copyright © 2017年 An An. All rights reserved. // source code ----*** https://github.com/AAChartModel/AAChartKit ***--- source code // #import <UIKit/UIKit.h> typedef NS_ENUM(NSInteger,SecondeViewControllerChartType){ SecondeViewControllerChartTypeColumn =0, SecondeViewControllerChartTypeBar, SecondeViewControllerChartTypeArea, SecondeViewControllerChartTypeAreaspline, SecondeViewControllerChartTypeLine, SecondeViewControllerChartTypeSpline, SecondeViewControllerChartTypeScatter, }; @interface SecondViewController : UIViewController @property(nonatomic,assign)NSInteger SecondeViewControllerChartType; @property(nonatomic,copy)NSString *receivedChartType; @end
Correct the naming notations of enumeration
Correct the naming notations of enumeration
C
mit
AAChartModel/AAChartKit,AAChartModel/AAChartKit,AAChartModel/AAChartKit
c
## Code Before: // // ViewController.h // AAChartKit // // Created by An An on 17/3/13. // Copyright © 2017年 An An. All rights reserved. // source code ----*** https://github.com/AAChartModel/AAChartKit ***--- source code // #import <UIKit/UIKit.h> typedef NS_ENUM(NSInteger,ENUM_secondeViewController_chartType){ ENUM_secondeViewController_chartTypeColumn =0, ENUM_secondeViewController_chartTypeBar, ENUM_secondeViewController_chartTypeArea, ENUM_secondeViewController_chartTypeAreaspline, ENUM_secondeViewController_chartTypeLine, ENUM_secondeViewController_chartTypeSpline, ENUM_secondeViewController_chartTypeScatter, }; @interface SecondViewController : UIViewController @property(nonatomic,assign)NSInteger ENUM_secondeViewController_chartType; @property(nonatomic,copy)NSString *receivedChartType; @end ## Instruction: Correct the naming notations of enumeration ## Code After: // // ViewController.h // AAChartKit // // Created by An An on 17/3/13. // Copyright © 2017年 An An. All rights reserved. // source code ----*** https://github.com/AAChartModel/AAChartKit ***--- source code // #import <UIKit/UIKit.h> typedef NS_ENUM(NSInteger,SecondeViewControllerChartType){ SecondeViewControllerChartTypeColumn =0, SecondeViewControllerChartTypeBar, SecondeViewControllerChartTypeArea, SecondeViewControllerChartTypeAreaspline, SecondeViewControllerChartTypeLine, SecondeViewControllerChartTypeSpline, SecondeViewControllerChartTypeScatter, }; @interface SecondViewController : UIViewController @property(nonatomic,assign)NSInteger SecondeViewControllerChartType; @property(nonatomic,copy)NSString *receivedChartType; @end
... // #import <UIKit/UIKit.h> typedef NS_ENUM(NSInteger,SecondeViewControllerChartType){ SecondeViewControllerChartTypeColumn =0, SecondeViewControllerChartTypeBar, SecondeViewControllerChartTypeArea, SecondeViewControllerChartTypeAreaspline, SecondeViewControllerChartTypeLine, SecondeViewControllerChartTypeSpline, SecondeViewControllerChartTypeScatter, }; @interface SecondViewController : UIViewController @property(nonatomic,assign)NSInteger SecondeViewControllerChartType; @property(nonatomic,copy)NSString *receivedChartType; @end ...
8d034ca0c30166ec3972d0f8db83e00ff4f8055f
setup.py
setup.py
from distutils.core import setup setup(name='steamfootbridge', version='0.0.1', packages=['steamfootbridge'], scripts=['bin/steamfootbridge'], )
from setuptools import setup setup(name='steamfootbridge', version='0.0.1', packages=['steamfootbridge'], scripts=['bin/steamfootbridge'], install_requires=[ 'steamodd', ], )
Convert Python installation to PyPi
Convert Python installation to PyPi. This does mean python-pip or similar will need to be installed on the system.
Python
mit
sirnuke/steamfootbridge,sirnuke/steamfootbridge
python
## Code Before: from distutils.core import setup setup(name='steamfootbridge', version='0.0.1', packages=['steamfootbridge'], scripts=['bin/steamfootbridge'], ) ## Instruction: Convert Python installation to PyPi This does mean python-pip or similar will need to be installed on the system. ## Code After: from setuptools import setup setup(name='steamfootbridge', version='0.0.1', packages=['steamfootbridge'], scripts=['bin/steamfootbridge'], install_requires=[ 'steamodd', ], )
// ... existing code ... from setuptools import setup setup(name='steamfootbridge', version='0.0.1', packages=['steamfootbridge'], scripts=['bin/steamfootbridge'], install_requires=[ 'steamodd', ], ) // ... rest of the code ...
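The switch above matters because `install_requires` is a setuptools feature: plain `distutils.core.setup` does not implement it, so the `steamodd` dependency would never be pulled in at install time. A generic, hypothetical `setup.py` skeleton showing the setuptools pattern (package and dependency names are placeholders):

```python
# setup.py -- minimal setuptools skeleton; names below are placeholders.
from setuptools import setup, find_packages

setup(
    name="example-package",
    version="0.0.1",
    packages=find_packages(),
    # Runtime dependencies that pip resolves and installs automatically;
    # plain distutils would ignore this keyword.
    install_requires=[
        "requests",
    ],
)
```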
cabf58d78d3ecbfaefbec60406de355d4b5a1f00
core/src/main/java/org/radargun/config/FlightRecorder.java
core/src/main/java/org/radargun/config/FlightRecorder.java
package org.radargun.config; import java.util.List; import static org.radargun.config.VmArgUtils.ensureArg; import static org.radargun.config.VmArgUtils.replace; public class FlightRecorder implements VmArg { @Property(doc = "Start flight recording for the benchmark.", optional = false) private boolean enabled = false; @Property(doc = "File for the recording.") private String filename; @Property(doc = "Settings file with recording configuration.") private String settings; @Override public void setArgs(List<String> args) { if (!enabled) return; StringBuilder recordingParams = new StringBuilder("=compress=false,delay=10s,duration=24h"); if (filename != null) recordingParams.append(",filename=").append(filename); if (settings != null) recordingParams.append(",settings=").append(settings); ensureArg(args, "-XX:+UnlockCommercialFeatures"); ensureArg(args, "-XX:+FlightRecorder"); replace(args, "-XX:StartFlightRecording", recordingParams.toString()); } public boolean isEnabled() { return enabled; } public String getFilename() { return filename; } public String getSettings() { return settings; } }
package org.radargun.config; import java.util.List; import static org.radargun.config.VmArgUtils.ensureArg; import static org.radargun.config.VmArgUtils.replace; public class FlightRecorder implements VmArg { @Property(doc = "Start flight recording for the benchmark.", optional = false) private boolean enabled = false; @Property(doc = "File for the recording.") private String filename; @Property(doc = "Settings file with recording configuration.") private String settings; @Override public void setArgs(List<String> args) { if (!enabled) return; StringBuilder recordingParams = new StringBuilder("=delay=10s,duration=24h"); if (filename != null) recordingParams.append(",filename=").append(filename); if (settings != null) recordingParams.append(",settings=").append(settings); ensureArg(args, "-XX:+UnlockCommercialFeatures"); ensureArg(args, "-XX:+FlightRecorder"); replace(args, "-XX:StartFlightRecording", recordingParams.toString()); } public boolean isEnabled() { return enabled; } public String getFilename() { return filename; } public String getSettings() { return settings; } }
Fix Unknown argument 'compress' in diagnostic for JDK11
Fix Unknown argument 'compress' in diagnostic for JDK11
Java
apache-2.0
radargun/radargun,radargun/radargun,radargun/radargun,radargun/radargun
java
## Code Before: package org.radargun.config; import java.util.List; import static org.radargun.config.VmArgUtils.ensureArg; import static org.radargun.config.VmArgUtils.replace; public class FlightRecorder implements VmArg { @Property(doc = "Start flight recording for the benchmark.", optional = false) private boolean enabled = false; @Property(doc = "File for the recording.") private String filename; @Property(doc = "Settings file with recording configuration.") private String settings; @Override public void setArgs(List<String> args) { if (!enabled) return; StringBuilder recordingParams = new StringBuilder("=compress=false,delay=10s,duration=24h"); if (filename != null) recordingParams.append(",filename=").append(filename); if (settings != null) recordingParams.append(",settings=").append(settings); ensureArg(args, "-XX:+UnlockCommercialFeatures"); ensureArg(args, "-XX:+FlightRecorder"); replace(args, "-XX:StartFlightRecording", recordingParams.toString()); } public boolean isEnabled() { return enabled; } public String getFilename() { return filename; } public String getSettings() { return settings; } } ## Instruction: Fix Unknown argument 'compress' in diagnostic for JDK11 ## Code After: package org.radargun.config; import java.util.List; import static org.radargun.config.VmArgUtils.ensureArg; import static org.radargun.config.VmArgUtils.replace; public class FlightRecorder implements VmArg { @Property(doc = "Start flight recording for the benchmark.", optional = false) private boolean enabled = false; @Property(doc = "File for the recording.") private String filename; @Property(doc = "Settings file with recording configuration.") private String settings; @Override public void setArgs(List<String> args) { if (!enabled) return; StringBuilder recordingParams = new StringBuilder("=delay=10s,duration=24h"); if (filename != null) recordingParams.append(",filename=").append(filename); if (settings != null) recordingParams.append(",settings=").append(settings); ensureArg(args, "-XX:+UnlockCommercialFeatures"); ensureArg(args, "-XX:+FlightRecorder"); replace(args, "-XX:StartFlightRecording", recordingParams.toString()); } public boolean isEnabled() { return enabled; } public String getFilename() { return filename; } public String getSettings() { return settings; } }
... public void setArgs(List<String> args) { if (!enabled) return; StringBuilder recordingParams = new StringBuilder("=delay=10s,duration=24h"); if (filename != null) recordingParams.append(",filename=").append(filename); if (settings != null) ... public String getSettings() { return settings; } } ...
d03f540265a1491863900e35eb4be3e46b35d1c5
project/src/main/java/com/google/sps/data/CompetitorInfo.java
project/src/main/java/com/google/sps/data/CompetitorInfo.java
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.data; import com.google.auto.value.AutoValue; /** * Class representing user data specific to a competition */ @AutoValue public abstract class CompetitorInfo { public static CompetitorInfo create(long id, String name, String email, long netWorth, long amountAvailable) { return new AutoValue_CompetitorInfo(id, name, email, netWorth, amountAvailable); } /** The id of the competitor */ public abstract long id(); /** The competitor's name */ public abstract String name(); /** The competitor's Google email */ public abstract String email(); /** The networth of the competitor */ public abstract long netWorth(); /** The amount the user has available for additional investments */ public abstract long amountAvailable(); }
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.data; import com.google.auto.value.AutoValue; /** * Class representing user data specific to a competition */ @AutoValue public abstract class CompetitorInfo { public static CompetitorInfo create(String name, String email, int rank, int rankYesterday, long netWorth, long amountAvailable, int numInvestments) { return new AutoValue_CompetitorInfo(name, email, rank, rankYesterday, netWorth, amountAvailable, numInvestments); } /** The competitor's name */ public abstract String name(); /** The competitor's Google email */ public abstract String email(); /** Competitor's rank */ public abstract int rank(); /** Competitor's rank yesterday */ public abstract int rankYesterday(); /** The networth of the competitor */ public abstract long netWorth(); /** The amount the user has available for additional investments */ public abstract long amountAvailable(); /** The number of investments owned by this competitor */ public abstract int numInvestments(); }
Add class for competitor info
Add class for competitor info
Java
apache-2.0
googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks
java
## Code Before: // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.data; import com.google.auto.value.AutoValue; /** * Class representing user data specific to a competition */ @AutoValue public abstract class CompetitorInfo { public static CompetitorInfo create(long id, String name, String email, long netWorth, long amountAvailable) { return new AutoValue_CompetitorInfo(id, name, email, netWorth, amountAvailable); } /** The id of the competitor */ public abstract long id(); /** The competitor's name */ public abstract String name(); /** The competitor's Google email */ public abstract String email(); /** The networth of the competitor */ public abstract long netWorth(); /** The amount the user has available for additional investments */ public abstract long amountAvailable(); } ## Instruction: Add class for competitor info ## Code After: // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.data; import com.google.auto.value.AutoValue; /** * Class representing user data specific to a competition */ @AutoValue public abstract class CompetitorInfo { public static CompetitorInfo create(String name, String email, int rank, int rankYesterday, long netWorth, long amountAvailable, int numInvestments) { return new AutoValue_CompetitorInfo(name, email, rank, rankYesterday, netWorth, amountAvailable, numInvestments); } /** The competitor's name */ public abstract String name(); /** The competitor's Google email */ public abstract String email(); /** Competitor's rank */ public abstract int rank(); /** Competitor's rank yesterday */ public abstract int rankYesterday(); /** The networth of the competitor */ public abstract long netWorth(); /** The amount the user has available for additional investments */ public abstract long amountAvailable(); /** The number of investments owned by this competitor */ public abstract int numInvestments(); }
# ... existing code ... */ @AutoValue public abstract class CompetitorInfo { public static CompetitorInfo create(String name, String email, int rank, int rankYesterday, long netWorth, long amountAvailable, int numInvestments) { return new AutoValue_CompetitorInfo(name, email, rank, rankYesterday, netWorth, amountAvailable, numInvestments); } /** The competitor's name */ public abstract String name(); /** The competitor's Google email */ public abstract String email(); /** Competitor's rank */ public abstract int rank(); /** Competitor's rank yesterday */ public abstract int rankYesterday(); /** The networth of the competitor */ public abstract long netWorth(); /** The amount the user has available for additional investments */ public abstract long amountAvailable(); /** The number of investments owned by this competitor */ public abstract int numInvestments(); } # ... rest of the code ...
e40ef4cbe59c5c3d064e60f02f60f19b0bb202a4
test_daily_parser.py
test_daily_parser.py
"""Unit tests.""" import unittest from daily_parser import url_from_args class TestDailyParser(unittest.TestCase): """Testing methods from daily_parser.""" def test_url_from_args(self): output = url_from_args(2014, 1) expected = 'https://dons.wikimedia.fr/journal/2014-01' self.assertEqual(output, expected)
"""Unit tests.""" import unittest from daily_parser import url_from_args, DonationsParser class TestDailyParser(unittest.TestCase): """Testing methods from daily_parser.""" def test_url_from_args(self): output = url_from_args(2014, 1) expected = 'https://dons.wikimedia.fr/journal/2014-01' self.assertEqual(output, expected) class TestDonationsParser(unittest.TestCase): """Testing DonationsParser class.""" def setUp(self): self.donations_parser = DonationsParser(2014, 01) donations_data = { '01': {'sum': 370, 'avg': 46.25, 'quantity': 8}, '02': {'sum': 5682, 'avg': 132.14, 'quantity': 43} } self.donations_parser.donations = donations_data def test_get_csv(self): expected = """'day', 'sum', 'quantity', 'avg' '2014-01-01', 370, 8, 46.25 '2014-01-02', 5682, 43, 132.14 """ output = self.donations_parser.get_csv() self.assertEqual(output, expected)
Add unit test for DonationsParser.get_csv
Add unit test for DonationsParser.get_csv
Python
mit
Commonists/DonationsLogParser,Commonists/DonationsLogParser
python
## Code Before: """Unit tests.""" import unittest from daily_parser import url_from_args class TestDailyParser(unittest.TestCase): """Testing methods from daily_parser.""" def test_url_from_args(self): output = url_from_args(2014, 1) expected = 'https://dons.wikimedia.fr/journal/2014-01' self.assertEqual(output, expected) ## Instruction: Add unit test for DonationsParser.get_csv ## Code After: """Unit tests.""" import unittest from daily_parser import url_from_args, DonationsParser class TestDailyParser(unittest.TestCase): """Testing methods from daily_parser.""" def test_url_from_args(self): output = url_from_args(2014, 1) expected = 'https://dons.wikimedia.fr/journal/2014-01' self.assertEqual(output, expected) class TestDonationsParser(unittest.TestCase): """Testing DonationsParser class.""" def setUp(self): self.donations_parser = DonationsParser(2014, 01) donations_data = { '01': {'sum': 370, 'avg': 46.25, 'quantity': 8}, '02': {'sum': 5682, 'avg': 132.14, 'quantity': 43} } self.donations_parser.donations = donations_data def test_get_csv(self): expected = """'day', 'sum', 'quantity', 'avg' '2014-01-01', 370, 8, 46.25 '2014-01-02', 5682, 43, 132.14 """ output = self.donations_parser.get_csv() self.assertEqual(output, expected)
... """Unit tests.""" import unittest from daily_parser import url_from_args, DonationsParser class TestDailyParser(unittest.TestCase): ... output = url_from_args(2014, 1) expected = 'https://dons.wikimedia.fr/journal/2014-01' self.assertEqual(output, expected) class TestDonationsParser(unittest.TestCase): """Testing DonationsParser class.""" def setUp(self): self.donations_parser = DonationsParser(2014, 01) donations_data = { '01': {'sum': 370, 'avg': 46.25, 'quantity': 8}, '02': {'sum': 5682, 'avg': 132.14, 'quantity': 43} } self.donations_parser.donations = donations_data def test_get_csv(self): expected = """'day', 'sum', 'quantity', 'avg' '2014-01-01', 370, 8, 46.25 '2014-01-02', 5682, 43, 132.14 """ output = self.donations_parser.get_csv() self.assertEqual(output, expected) ...
76bbaa5e0208e5c28747fff09388cd52ef63f6f5
blackjax/__init__.py
blackjax/__init__.py
from .diagnostics import effective_sample_size as ess from .diagnostics import potential_scale_reduction as rhat from .kernels import ( adaptive_tempered_smc, elliptical_slice, ghmc, hmc, irmh, mala, meads, mgrad_gaussian, nuts, orbital_hmc, pathfinder_adaptation, rmh, sghmc, sgld, tempered_smc, window_adaptation, ) from .optimizers import dual_averaging, lbfgs __all__ = [ "dual_averaging", # optimizers "lbfgs", "hmc", # mcmc "mala", "mgrad_gaussian", "nuts", "orbital_hmc", "rmh", "irmh", "elliptical_slice", "ghmc", "meads", "sgld", # stochastic gradient mcmc "sghmc", "window_adaptation", # mcmc adaptation "pathfinder_adaptation", "adaptive_tempered_smc", # smc "tempered_smc", "ess", # diagnostics "rhat", ] from . import _version __version__ = _version.get_versions()["version"]
from .diagnostics import effective_sample_size as ess from .diagnostics import potential_scale_reduction as rhat from .kernels import ( adaptive_tempered_smc, elliptical_slice, ghmc, hmc, irmh, mala, meads, mgrad_gaussian, nuts, orbital_hmc, pathfinder, pathfinder_adaptation, rmh, sghmc, sgld, tempered_smc, window_adaptation, ) from .optimizers import dual_averaging, lbfgs __all__ = [ "dual_averaging", # optimizers "lbfgs", "hmc", # mcmc "mala", "mgrad_gaussian", "nuts", "orbital_hmc", "rmh", "irmh", "elliptical_slice", "ghmc", "meads", "sgld", # stochastic gradient mcmc "sghmc", "window_adaptation", # mcmc adaptation "pathfinder_adaptation", "adaptive_tempered_smc", # smc "tempered_smc", "pathfinder", # variational inference "ess", # diagnostics "rhat", ] from . import _version __version__ = _version.get_versions()["version"]
Add `pathfinder` to the library namespace
Add `pathfinder` to the library namespace
Python
apache-2.0
blackjax-devs/blackjax
python
## Code Before: from .diagnostics import effective_sample_size as ess from .diagnostics import potential_scale_reduction as rhat from .kernels import ( adaptive_tempered_smc, elliptical_slice, ghmc, hmc, irmh, mala, meads, mgrad_gaussian, nuts, orbital_hmc, pathfinder_adaptation, rmh, sghmc, sgld, tempered_smc, window_adaptation, ) from .optimizers import dual_averaging, lbfgs __all__ = [ "dual_averaging", # optimizers "lbfgs", "hmc", # mcmc "mala", "mgrad_gaussian", "nuts", "orbital_hmc", "rmh", "irmh", "elliptical_slice", "ghmc", "meads", "sgld", # stochastic gradient mcmc "sghmc", "window_adaptation", # mcmc adaptation "pathfinder_adaptation", "adaptive_tempered_smc", # smc "tempered_smc", "ess", # diagnostics "rhat", ] from . import _version __version__ = _version.get_versions()["version"] ## Instruction: Add `pathfinder` to the library namespace ## Code After: from .diagnostics import effective_sample_size as ess from .diagnostics import potential_scale_reduction as rhat from .kernels import ( adaptive_tempered_smc, elliptical_slice, ghmc, hmc, irmh, mala, meads, mgrad_gaussian, nuts, orbital_hmc, pathfinder, pathfinder_adaptation, rmh, sghmc, sgld, tempered_smc, window_adaptation, ) from .optimizers import dual_averaging, lbfgs __all__ = [ "dual_averaging", # optimizers "lbfgs", "hmc", # mcmc "mala", "mgrad_gaussian", "nuts", "orbital_hmc", "rmh", "irmh", "elliptical_slice", "ghmc", "meads", "sgld", # stochastic gradient mcmc "sghmc", "window_adaptation", # mcmc adaptation "pathfinder_adaptation", "adaptive_tempered_smc", # smc "tempered_smc", "pathfinder", # variational inference "ess", # diagnostics "rhat", ] from . import _version __version__ = _version.get_versions()["version"]
# ... existing code ... mgrad_gaussian, nuts, orbital_hmc, pathfinder, pathfinder_adaptation, rmh, sghmc, # ... modified code ... "pathfinder_adaptation", "adaptive_tempered_smc", # smc "tempered_smc", "pathfinder", # variational inference "ess", # diagnostics "rhat", ] # ... rest of the code ...
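The commit above makes `pathfinder` importable from the package root by importing it in `__init__.py` and adding it to `__all__` (which also controls `from blackjax import *`). Since a real package needs separate files, the sketch below fakes a tiny package in memory purely to show the re-export mechanics; the module names are made up.

```python
import sys
import types

# Stand-in for mypkg/kernels.py, which owns the implementation.
kernels = types.ModuleType("mypkg.kernels")
exec("def pathfinder():\n    return 'variational inference'", kernels.__dict__)

# Stand-in for mypkg/__init__.py: re-export the name and list it in __all__.
mypkg = types.ModuleType("mypkg")
mypkg.pathfinder = kernels.pathfinder     # the `from .kernels import pathfinder` step
mypkg.__all__ = ["pathfinder"]            # governs `from mypkg import *`

sys.modules["mypkg"] = mypkg
sys.modules["mypkg.kernels"] = kernels

from mypkg import pathfinder              # users now import from the package root
print(pathfinder())                       # variational inference
```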
148e4319c5f91851892ad306bac9ab47daf42b98
xstream/src/java/com/thoughtworks/xstream/core/util/ClassStack.java
xstream/src/java/com/thoughtworks/xstream/core/util/ClassStack.java
package com.thoughtworks.xstream.core.util; public final class ClassStack { private Class[] stack; private int pointer; public ClassStack(int initialCapacity) { stack = new Class[initialCapacity]; } public void push(Class value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; } public void popSilently() { pointer--; } public Class pop() { return stack[--pointer]; } public Class peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public Class get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Class[] newStack = new Class[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } }
package com.thoughtworks.xstream.core.util; public final class ClassStack { private Class[] stack; private int pointer; public ClassStack(int initialCapacity) { stack = new Class[initialCapacity]; } public void push(Class value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; } public void popSilently() { stack[--pointer] = null; } public Class pop() { final Class result = stack[--pointer]; stack[pointer] = null; return result; } public Class peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public Class get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Class[] newStack = new Class[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } }
Drop references from popped classes.
Drop references from popped classes. git-svn-id: fe6d842192ccfb78748eb71580d1ce65f168b559@704 9830eeb5-ddf4-0310-9ef7-f4b9a3e3227e
Java
bsd-3-clause
svn2github/xstream,svn2github/xstream
java
## Code Before: package com.thoughtworks.xstream.core.util; public final class ClassStack { private Class[] stack; private int pointer; public ClassStack(int initialCapacity) { stack = new Class[initialCapacity]; } public void push(Class value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; } public void popSilently() { pointer--; } public Class pop() { return stack[--pointer]; } public Class peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public Class get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Class[] newStack = new Class[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } } ## Instruction: Drop references from popped classes. git-svn-id: fe6d842192ccfb78748eb71580d1ce65f168b559@704 9830eeb5-ddf4-0310-9ef7-f4b9a3e3227e ## Code After: package com.thoughtworks.xstream.core.util; public final class ClassStack { private Class[] stack; private int pointer; public ClassStack(int initialCapacity) { stack = new Class[initialCapacity]; } public void push(Class value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; } public void popSilently() { stack[--pointer] = null; } public Class pop() { final Class result = stack[--pointer]; stack[pointer] = null; return result; } public Class peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public Class get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Class[] newStack = new Class[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } }
... } public void popSilently() { stack[--pointer] = null; } public Class pop() { final Class result = stack[--pointer]; stack[pointer] = null; return result; } public Class peek() { ...
fbb675f33933c2dd06f9853b042aea9613a0d602
setup.py
setup.py
import os import re from setuptools import setup, find_packages _here = os.path.dirname(__file__) _init = os.path.join(_here, 'van', 'contactology', '__init__.py') _init = open(_init, 'r').read() VERSION = re.search(r'^__version__ = "(.*)"', _init, re.MULTILINE).group(1) setup(name="van.contactology", version=VERSION, packages=find_packages(), description="Contactology API for Twisted", namespace_packages=["van"], install_requires=[ 'pyOpenSSL', 'setuptools', 'Twisted', 'simplejson', ], test_suite="van.contactology.tests", tests_require=['mock'], include_package_data=True, zip_safe=False, )
import os import re from setuptools import setup, find_packages _here = os.path.dirname(__file__) _init = os.path.join(_here, 'van', 'contactology', '__init__.py') _init = open(_init, 'r').read() VERSION = re.search(r'^__version__ = "(.*)"', _init, re.MULTILINE).group(1) README = open(os.path.join(_here, 'README.txt'), 'r').read() setup(name="van.contactology", version=VERSION, packages=find_packages(), description="Contactology API for Twisted", author_email='[email protected]', long_description=README, namespace_packages=["van"], install_requires=[ 'pyOpenSSL', 'setuptools', 'Twisted', 'simplejson', ], test_suite="van.contactology.tests", tests_require=['mock'], include_package_data=True, zip_safe=False, )
Add contact information and readme in long description.
Add contact information and readme in long description.
Python
bsd-3-clause
jinty/van.contactology
python
## Code Before: import os import re from setuptools import setup, find_packages _here = os.path.dirname(__file__) _init = os.path.join(_here, 'van', 'contactology', '__init__.py') _init = open(_init, 'r').read() VERSION = re.search(r'^__version__ = "(.*)"', _init, re.MULTILINE).group(1) setup(name="van.contactology", version=VERSION, packages=find_packages(), description="Contactology API for Twisted", namespace_packages=["van"], install_requires=[ 'pyOpenSSL', 'setuptools', 'Twisted', 'simplejson', ], test_suite="van.contactology.tests", tests_require=['mock'], include_package_data=True, zip_safe=False, ) ## Instruction: Add contact information and readme in long description. ## Code After: import os import re from setuptools import setup, find_packages _here = os.path.dirname(__file__) _init = os.path.join(_here, 'van', 'contactology', '__init__.py') _init = open(_init, 'r').read() VERSION = re.search(r'^__version__ = "(.*)"', _init, re.MULTILINE).group(1) README = open(os.path.join(_here, 'README.txt'), 'r').read() setup(name="van.contactology", version=VERSION, packages=find_packages(), description="Contactology API for Twisted", author_email='[email protected]', long_description=README, namespace_packages=["van"], install_requires=[ 'pyOpenSSL', 'setuptools', 'Twisted', 'simplejson', ], test_suite="van.contactology.tests", tests_require=['mock'], include_package_data=True, zip_safe=False, )
... VERSION = re.search(r'^__version__ = "(.*)"', _init, re.MULTILINE).group(1) README = open(os.path.join(_here, 'README.txt'), 'r').read() setup(name="van.contactology", version=VERSION, packages=find_packages(), description="Contactology API for Twisted", author_email='[email protected]', long_description=README, namespace_packages=["van"], install_requires=[ 'pyOpenSSL', ...
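The commit above reads `README.txt` next to `setup.py` and passes the text as `long_description`, which package indexes render as the project page, alongside an `author_email` contact. A hypothetical standalone skeleton of the pattern (names and paths are placeholders):

```python
# setup.py -- reuse the README as the long description shown on the package index.
import os
from setuptools import setup, find_packages

_here = os.path.dirname(__file__)
with open(os.path.join(_here, "README.txt")) as f:
    README = f.read()

setup(
    name="example-package",
    version="1.0",
    packages=find_packages(),
    description="One-line summary",
    long_description=README,
    author_email="[email protected]",   # placeholder contact address
)
```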
e15fb53c0fd63942cafd3a6f11418447df6b6800
siphon/cdmr/tests/test_coveragedataset.py
siphon/cdmr/tests/test_coveragedataset.py
import warnings from siphon.testing import get_recorder from siphon.cdmr.coveragedataset import CoverageDataset recorder = get_recorder(__file__) # Ignore warnings about CoverageDataset warnings.simplefilter('ignore') @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature(): 'Just a smoke test for CDMRemoteFeature' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert cd.grids
import warnings from siphon.testing import get_recorder from siphon.cdmr.coveragedataset import CoverageDataset recorder = get_recorder(__file__) # Ignore warnings about CoverageDataset warnings.simplefilter('ignore') @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature(): 'Just a smoke test for CDMRemoteFeature' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert cd.grids @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature_str(): 'Just a smoke test for converting CoverageDataset to str' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert str(cd)
Add smoketest for converting CoverageDataset to str.
Add smoketest for converting CoverageDataset to str.
Python
bsd-3-clause
dopplershift/siphon,dopplershift/siphon,Unidata/siphon
python
## Code Before: import warnings from siphon.testing import get_recorder from siphon.cdmr.coveragedataset import CoverageDataset recorder = get_recorder(__file__) # Ignore warnings about CoverageDataset warnings.simplefilter('ignore') @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature(): 'Just a smoke test for CDMRemoteFeature' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert cd.grids ## Instruction: Add smoketest for convering CoverageDataset to str. ## Code After: import warnings from siphon.testing import get_recorder from siphon.cdmr.coveragedataset import CoverageDataset recorder = get_recorder(__file__) # Ignore warnings about CoverageDataset warnings.simplefilter('ignore') @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature(): 'Just a smoke test for CDMRemoteFeature' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert cd.grids @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature_str(): 'Just a smoke test for converting CoverageDataset to str' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert str(cd)
# ... existing code ... cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert cd.grids @recorder.use_cassette('hrrr_cdmremotefeature') def test_simple_cdmremotefeature_str(): 'Just a smoke test for converting CoverageDataset to str' cd = CoverageDataset('http://localhost:8080/thredds/cdmrfeature/grid/' 'test/HRRR_CONUS_2p5km_20160309_1600.grib2') assert str(cd) # ... rest of the code ...
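The added test above is a smoke test: it only asserts that `str(cd)` can be built without raising, not what the text contains. The same idea in isolation, with a stand-in class instead of `CoverageDataset`:

```python
class Dataset:
    """Stand-in object whose __str__ walks some internal state."""
    def __init__(self, grids):
        self.grids = grids

    def __str__(self):
        return "Dataset with grids: " + ", ".join(self.grids)


def test_dataset_str():
    ds = Dataset(["Temperature", "Pressure"])
    # Smoke test: producing the string must not raise and must be non-empty.
    assert str(ds)


test_dataset_str()
```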
5adc4a0637b31de518b30bbc662c3d50bc523a5a
airtravel.py
airtravel.py
"""Model for aircraft flights""" class Flight: def __init__(self, number): if not number[:4].isalpha(): raise ValueError("No airline code in '{}'".format(number)) if not number[:4].isupper(): raise ValueError("Invalid airline code'{}'".format(number)) if not (number[4:].isdigit() and int(number[4:]) <= 999999): raise ValueError("Invalid route number '{}'".format(number)) self._number = number def number(self): return self._number def airline(self): return self._number[:4] class Aircraft: def __init__(self, registration, model, num_rows, num_seats_per_row): self._registration = registration self._model = model self._num_rows = num_rows self._num_seats_per_row = num_seats_per_row def registration(self): return self._registration def model(self): return self._model
"""Model for aircraft flights""" class Flight: def __init__(self, number): if not number[:4].isalpha(): raise ValueError("No airline code in '{}'".format(number)) if not number[:4].isupper(): raise ValueError("Invalid airline code'{}'".format(number)) if not (number[4:].isdigit() and int(number[4:]) <= 999999): raise ValueError("Invalid route number '{}'".format(number)) self._number = number def number(self): return self._number def airline(self): return self._number[:4] class Aircraft: def __init__(self, registration, model, num_rows, num_seats_per_row): self._registration = registration self._model = model self._num_rows = num_rows self._num_seats_per_row = num_seats_per_row def registration(self): return self._registration def model(self): return self._model def seating_plan(self): return (range(1, self._num_rows + 1), "ABCDEFGHJKLMNOP"[:self._num_seats_per_row])
Add seating plan to aircraft
Add seating plan to aircraft
Python
mit
kentoj/python-fundamentals
python
## Code Before: """Model for aircraft flights""" class Flight: def __init__(self, number): if not number[:4].isalpha(): raise ValueError("No airline code in '{}'".format(number)) if not number[:4].isupper(): raise ValueError("Invalid airline code'{}'".format(number)) if not (number[4:].isdigit() and int(number[4:]) <= 999999): raise ValueError("Invalid route number '{}'".format(number)) self._number = number def number(self): return self._number def airline(self): return self._number[:4] class Aircraft: def __init__(self, registration, model, num_rows, num_seats_per_row): self._registration = registration self._model = model self._num_rows = num_rows self._num_seats_per_row = num_seats_per_row def registration(self): return self._registration def model(self): return self._model ## Instruction: Add seating plan to aircraft ## Code After: """Model for aircraft flights""" class Flight: def __init__(self, number): if not number[:4].isalpha(): raise ValueError("No airline code in '{}'".format(number)) if not number[:4].isupper(): raise ValueError("Invalid airline code'{}'".format(number)) if not (number[4:].isdigit() and int(number[4:]) <= 999999): raise ValueError("Invalid route number '{}'".format(number)) self._number = number def number(self): return self._number def airline(self): return self._number[:4] class Aircraft: def __init__(self, registration, model, num_rows, num_seats_per_row): self._registration = registration self._model = model self._num_rows = num_rows self._num_seats_per_row = num_seats_per_row def registration(self): return self._registration def model(self): return self._model def seating_plan(self): return (range(1, self._num_rows + 1), "ABCDEFGHJKLMNOP"[:self._num_seats_per_row])
# ... existing code ... def model(self): return self._model def seating_plan(self): return (range(1, self._num_rows + 1), "ABCDEFGHJKLMNOP"[:self._num_seats_per_row]) # ... rest of the code ...
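The `seating_plan` method added above returns a row range paired with a letter string that deliberately skips "I". A quick hypothetical usage sketch showing how that tuple expands into individual seat labels:

```python
def seating_plan(num_rows, num_seats_per_row):
    # Same shape as the method above: 1-based rows, seat letters skipping "I".
    return range(1, num_rows + 1), "ABCDEFGHJKLMNOP"[:num_seats_per_row]

rows, seats = seating_plan(num_rows=3, num_seats_per_row=4)
labels = ["{}{}".format(row, letter) for row in rows for letter in seats]
print(labels)  # ['1A', '1B', '1C', '1D', '2A', '2B', '2C', '2D', '3A', '3B', '3C', '3D']
```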
d7f0285788b9a4710766ec3f1275d8b7460cb32b
cukes-oauth/src/main/java/lv/ctco/cukes/oauth/GrantType.java
cukes-oauth/src/main/java/lv/ctco/cukes/oauth/GrantType.java
package lv.ctco.cukes.oauth; import lv.ctco.cukes.core.internal.context.GlobalWorldFacade; import java.util.HashMap; import java.util.Map; public enum GrantType { client_credentials, password(OAuthCukesConstants.USER_NAME, OAuthCukesConstants.PASSWORD); private static final Map<String, String> attributeNameMapping = new HashMap<>(); static { attributeNameMapping.put("username", OAuthCukesConstants.USER_NAME); attributeNameMapping.put("password", OAuthCukesConstants.PASSWORD); } private String[] requiredAttributes; GrantType(String... requiredAttributes) { this.requiredAttributes = requiredAttributes; } public Map<String, String> getParameters(GlobalWorldFacade world) { Map<String, String> parameters = new HashMap<>(); parameters.put("grant_type", name()); for (String attribute : requiredAttributes) { parameters.put(attributeNameMapping.get(attribute), world.getOrThrow(attribute)); } return parameters; } }
package lv.ctco.cukes.oauth; import lv.ctco.cukes.core.internal.context.GlobalWorldFacade; import java.util.HashMap; import java.util.Map; public enum GrantType { client_credentials, password(OAuthCukesConstants.USER_NAME, OAuthCukesConstants.PASSWORD); private static final Map<String, String> attributeNameMapping = new HashMap<>(); static { attributeNameMapping.put(OAuthCukesConstants.USER_NAME, "username"); attributeNameMapping.put(OAuthCukesConstants.PASSWORD, "password"); } private String[] requiredAttributes; GrantType(String... requiredAttributes) { this.requiredAttributes = requiredAttributes; } public Map<String, String> getParameters(GlobalWorldFacade world) { Map<String, String> parameters = new HashMap<>(); parameters.put("grant_type", name()); for (String attribute : requiredAttributes) { parameters.put(attributeNameMapping.get(attribute), world.getOrThrow(attribute)); } return parameters; } }
Fix issue with "password" grant type, fix map order
Fix issue with "password" grant type, fix map order
Java
apache-2.0
ctco/cukes
java
## Code Before: package lv.ctco.cukes.oauth; import lv.ctco.cukes.core.internal.context.GlobalWorldFacade; import java.util.HashMap; import java.util.Map; public enum GrantType { client_credentials, password(OAuthCukesConstants.USER_NAME, OAuthCukesConstants.PASSWORD); private static final Map<String, String> attributeNameMapping = new HashMap<>(); static { attributeNameMapping.put("username", OAuthCukesConstants.USER_NAME); attributeNameMapping.put("password", OAuthCukesConstants.PASSWORD); } private String[] requiredAttributes; GrantType(String... requiredAttributes) { this.requiredAttributes = requiredAttributes; } public Map<String, String> getParameters(GlobalWorldFacade world) { Map<String, String> parameters = new HashMap<>(); parameters.put("grant_type", name()); for (String attribute : requiredAttributes) { parameters.put(attributeNameMapping.get(attribute), world.getOrThrow(attribute)); } return parameters; } } ## Instruction: Fix issue with "password" grant type, fix map order ## Code After: package lv.ctco.cukes.oauth; import lv.ctco.cukes.core.internal.context.GlobalWorldFacade; import java.util.HashMap; import java.util.Map; public enum GrantType { client_credentials, password(OAuthCukesConstants.USER_NAME, OAuthCukesConstants.PASSWORD); private static final Map<String, String> attributeNameMapping = new HashMap<>(); static { attributeNameMapping.put(OAuthCukesConstants.USER_NAME, "username"); attributeNameMapping.put(OAuthCukesConstants.PASSWORD, "password"); } private String[] requiredAttributes; GrantType(String... requiredAttributes) { this.requiredAttributes = requiredAttributes; } public Map<String, String> getParameters(GlobalWorldFacade world) { Map<String, String> parameters = new HashMap<>(); parameters.put("grant_type", name()); for (String attribute : requiredAttributes) { parameters.put(attributeNameMapping.get(attribute), world.getOrThrow(attribute)); } return parameters; } }
# ... existing code ... private static final Map<String, String> attributeNameMapping = new HashMap<>(); static { attributeNameMapping.put(OAuthCukesConstants.USER_NAME, "username"); attributeNameMapping.put(OAuthCukesConstants.PASSWORD, "password"); } private String[] requiredAttributes; # ... rest of the code ...
d41d79bc430a07e9c1ebb9fdd80f995ef5f58336
src/main/java/org/monospark/spongematchers/parser/element/ListElementParser.java
src/main/java/org/monospark/spongematchers/parser/element/ListElementParser.java
package org.monospark.spongematchers.parser.element; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.monospark.spongematchers.util.PatternBuilder; import com.google.common.collect.Lists; public final class ListElementParser extends StringElementParser { @Override Pattern createPattern() { return new PatternBuilder() .openNamedParantheses("list") .appendNonCapturingPart("[") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .openAnonymousParantheses() .appendNonCapturingPart("\\s*,\\s*") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .closeParantheses() .zeroOrMore() .appendNonCapturingPart("]") .closeParantheses() .or() .build(); } @Override void parse(Matcher matcher, StringElementContext context) { createList(matcher, context); } private void createList(Matcher matcher, StringElementContext context) { List<StringElement> elements = Lists.newArrayList(); Matcher elementMatcher = StringElementParser.REPLACE_PATTERN.matcher(matcher.group()); while (elementMatcher.find()) { StringElement element = context.getElementAt(elementMatcher.start(), elementMatcher.end()); context.removeElement(element); elements.add(element); } context.addElement(new ListElement(matcher.start(), matcher.end(), elements));; } }
package org.monospark.spongematchers.parser.element; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.monospark.spongematchers.util.PatternBuilder; import com.google.common.collect.Lists; public final class ListElementParser extends StringElementParser { @Override Pattern createPattern() { return new PatternBuilder() .appendNonCapturingPart("\\[") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .openAnonymousParantheses() .appendNonCapturingPart("\\s*,\\s*") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .closeParantheses() .zeroOrMore() .appendNonCapturingPart("\\]") .build(); } @Override void parse(Matcher matcher, StringElementContext context) { createList(matcher, context); } private void createList(Matcher matcher, StringElementContext context) { List<StringElement> elements = Lists.newArrayList(); Matcher elementMatcher = StringElementParser.REPLACE_PATTERN.matcher(matcher.group()); while (elementMatcher.find()) { StringElement element = context.getElementAt(elementMatcher.start(), elementMatcher.end()); context.removeElement(element); elements.add(element); } context.addElement(new ListElement(matcher.start(), matcher.end(), elements));; } }
Fix some bugs in the list parser regex
Fix some bugs in the list parser regex
Java
mit
Monospark/SpongeMatchers
java
## Code Before: package org.monospark.spongematchers.parser.element; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.monospark.spongematchers.util.PatternBuilder; import com.google.common.collect.Lists; public final class ListElementParser extends StringElementParser { @Override Pattern createPattern() { return new PatternBuilder() .openNamedParantheses("list") .appendNonCapturingPart("[") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .openAnonymousParantheses() .appendNonCapturingPart("\\s*,\\s*") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .closeParantheses() .zeroOrMore() .appendNonCapturingPart("]") .closeParantheses() .or() .build(); } @Override void parse(Matcher matcher, StringElementContext context) { createList(matcher, context); } private void createList(Matcher matcher, StringElementContext context) { List<StringElement> elements = Lists.newArrayList(); Matcher elementMatcher = StringElementParser.REPLACE_PATTERN.matcher(matcher.group()); while (elementMatcher.find()) { StringElement element = context.getElementAt(elementMatcher.start(), elementMatcher.end()); context.removeElement(element); elements.add(element); } context.addElement(new ListElement(matcher.start(), matcher.end(), elements));; } } ## Instruction: Fix some bugs in the list parser regex ## Code After: package org.monospark.spongematchers.parser.element; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.monospark.spongematchers.util.PatternBuilder; import com.google.common.collect.Lists; public final class ListElementParser extends StringElementParser { @Override Pattern createPattern() { return new PatternBuilder() .appendNonCapturingPart("\\[") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .openAnonymousParantheses() .appendNonCapturingPart("\\s*,\\s*") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .closeParantheses() .zeroOrMore() .appendNonCapturingPart("\\]") .build(); } @Override void parse(Matcher matcher, StringElementContext context) { createList(matcher, context); } private void createList(Matcher matcher, StringElementContext context) { List<StringElement> elements = Lists.newArrayList(); Matcher elementMatcher = StringElementParser.REPLACE_PATTERN.matcher(matcher.group()); while (elementMatcher.find()) { StringElement element = context.getElementAt(elementMatcher.start(), elementMatcher.end()); context.removeElement(element); elements.add(element); } context.addElement(new ListElement(matcher.start(), matcher.end(), elements));; } }
... @Override Pattern createPattern() { return new PatternBuilder() .appendNonCapturingPart("\\[") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .openAnonymousParantheses() .appendNonCapturingPart("\\s*,\\s*") .appendNonCapturingPart(StringElementParser.REPLACE_PATTERN) .closeParantheses() .zeroOrMore() .appendNonCapturingPart("\\]") .build(); } ...
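A note on the regex fix above: in most regex dialects an unescaped [ opens a character class instead of matching a literal bracket, which is why the corrected PatternBuilder calls use \\[ and \\]. A rough analogy in Python's re; the actual pattern is Java regex and REPLACE_PATTERN is not shown, so this only illustrates the bug class:

```python
import re

# Unescaped brackets: the whole thing is read as one character class,
# so it matches a single character, never a bracketed list.
loose = re.compile(r"[\d+(?:\s*,\s*\d+)*]")
assert loose.fullmatch("[1, 2, 3]") is None

# Escaped brackets match the literal list syntax.
strict = re.compile(r"\[\d+(?:\s*,\s*\d+)*\]")
assert strict.fullmatch("[1, 2, 3]") is not None
```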
d9ed160e54ff40783a007154e194767af0574ec1
setup.py
setup.py
import sys if sys.version_info < (3,): sys.exit("catimg requires Python 3") from setuptools import setup import versioneer setup( name='catimg', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='''Print an image of a cat from Imgur to iTerm2.''', author='Aaron Meurer', author_email='[email protected]', url='https://github.com/asmeurer/catimg', packages=['catimg'], package_data={'catimg.tests': ['aloha_cat.png']}, long_description=""" catimg Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat in your terminal. NOTE: I do not own the images that you see, nor have I any control over them. You will see some image that is tagged as "cat" on Imgur. That could be anything. I do filter out images that are tagged NSFW, but there are no guarantees that you won't see something you wish you hadn't. Use at your own risk. License: MIT """, entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']}, install_requires=[ 'requests', 'imgurpython', ], license="MIT", classifiers=[ 'Environment :: MacOS X', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3', ], zip_safe=False, )
import sys if sys.version_info < (3,): sys.exit("catimg requires Python 3") from setuptools import setup import versioneer setup( name='catimg', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='''Print an image of a cat from Imgur to iTerm2.''', author='Aaron Meurer', author_email='[email protected]', url='https://github.com/asmeurer/catimg', packages=['catimg', 'catimg.tests'], package_data={'catimg.tests': ['aloha_cat.png']}, long_description=""" catimg Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat in your terminal. NOTE: I do not own the images that you see, nor have I any control over them. You will see some image that is tagged as "cat" on Imgur. That could be anything. I do filter out images that are tagged NSFW, but there are no guarantees that you won't see something you wish you hadn't. Use at your own risk. License: MIT """, entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']}, install_requires=[ 'requests', 'imgurpython', ], license="MIT", classifiers=[ 'Environment :: MacOS X', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3', ], zip_safe=False, )
Include the tests in the install
Include the tests in the install
Python
mit
asmeurer/catimg
python
## Code Before: import sys if sys.version_info < (3,): sys.exit("catimg requires Python 3") from setuptools import setup import versioneer setup( name='catimg', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='''Print an image of a cat from Imgur to iTerm2.''', author='Aaron Meurer', author_email='[email protected]', url='https://github.com/asmeurer/catimg', packages=['catimg'], package_data={'catimg.tests': ['aloha_cat.png']}, long_description=""" catimg Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat in your terminal. NOTE: I do not own the images that you see, nor have I any control over them. You will see some image that is tagged as "cat" on Imgur. That could be anything. I do filter out images that are tagged NSFW, but there are no guarantees that you won't see something you wish you hadn't. Use at your own risk. License: MIT """, entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']}, install_requires=[ 'requests', 'imgurpython', ], license="MIT", classifiers=[ 'Environment :: MacOS X', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3', ], zip_safe=False, ) ## Instruction: Include the tests in the install ## Code After: import sys if sys.version_info < (3,): sys.exit("catimg requires Python 3") from setuptools import setup import versioneer setup( name='catimg', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='''Print an image of a cat from Imgur to iTerm2.''', author='Aaron Meurer', author_email='[email protected]', url='https://github.com/asmeurer/catimg', packages=['catimg', 'catimg.tests'], package_data={'catimg.tests': ['aloha_cat.png']}, long_description=""" catimg Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat in your terminal. NOTE: I do not own the images that you see, nor have I any control over them. You will see some image that is tagged as "cat" on Imgur. That could be anything. I do filter out images that are tagged NSFW, but there are no guarantees that you won't see something you wish you hadn't. Use at your own risk. License: MIT """, entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']}, install_requires=[ 'requests', 'imgurpython', ], license="MIT", classifiers=[ 'Environment :: MacOS X', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3', ], zip_safe=False, )
... author='Aaron Meurer', author_email='[email protected]', url='https://github.com/asmeurer/catimg', packages=['catimg', 'catimg.tests'], package_data={'catimg.tests': ['aloha_cat.png']}, long_description=""" catimg ...
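A note on the packaging change above: listing 'catimg.tests' explicitly works, but if the tests directory has an __init__.py the same result can come from package discovery, so new subpackages are not forgotten again. A sketch under that layout assumption, not something from the record:

```python
from setuptools import setup, find_packages

# find_packages() returns every directory under the source tree that has an
# __init__.py, e.g. ['catimg', 'catimg.tests'], so setup.py needs no edits
# when subpackages are added later.
setup(
    name='catimg',
    packages=find_packages(),
    # Non-Python files inside a package still need package_data.
    package_data={'catimg.tests': ['aloha_cat.png']},
)
```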
fc6202425e0c855dc29980904949b60c0ac48bbf
preparation/tools/build_assets.py
preparation/tools/build_assets.py
from copy import copy from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage def rebuild_from_resource(resource_name: str): resource = resource_by_name(resource_name)() with get_storage(resource_name.replace('Resource', '')) as out_storage: out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_all(): for name in names_registered(): rebuild_from_resource(name)
from copy import copy from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage def rebuild_from_resource(resource_name: str): resource = resource_by_name(resource_name)() trunk = resource_name.replace('Resource', '') with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) print("Finished {} generation".format(trunk)) def rebuild_all(): for name in names_registered(): rebuild_from_resource(name)
Add start/finish debug info while generating
Add start/finish debug info while generating
Python
mit
hatbot-team/hatbot_resources
python
## Code Before: from copy import copy from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage def rebuild_from_resource(resource_name: str): resource = resource_by_name(resource_name)() with get_storage(resource_name.replace('Resource', '')) as out_storage: out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_all(): for name in names_registered(): rebuild_from_resource(name) ## Instruction: Add start/finish debug info while generating ## Code After: from copy import copy from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage def rebuild_from_resource(resource_name: str): resource = resource_by_name(resource_name)() trunk = resource_name.replace('Resource', '') with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) print("Finished {} generation".format(trunk)) def rebuild_all(): for name in names_registered(): rebuild_from_resource(name)
# ... existing code ... def rebuild_from_resource(resource_name: str): resource = resource_by_name(resource_name)() trunk = resource_name.replace('Resource', '') with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) out_storage.clear() for explanation in resource: r = copy(explanation) # ... modified code ... r = functor(r) if r is not None: out_storage.add_entry(r) print("Finished {} generation".format(trunk)) def rebuild_all(): # ... rest of the code ...
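A note on the start/finish prints above: the same bookkeeping can be pulled into a small context manager so every rebuild step reports consistently and can pick up timing later. A sketch, independent of the project's actual storage and resource modules:

```python
import time
from contextlib import contextmanager

@contextmanager
def report_generation(trunk):
    """Print start/finish lines (plus elapsed seconds) around one rebuild."""
    print("Starting {} generation".format(trunk))
    started = time.time()
    try:
        yield
    finally:
        print("Finished {} generation in {:.1f}s".format(trunk, time.time() - started))

# Hypothetical use inside rebuild_from_resource():
# with get_storage(trunk) as out_storage, report_generation(trunk):
#     out_storage.clear()
#     ...
```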
45b9d6329eb3ea4d602bd7785b9085d2769dfb70
setup.py
setup.py
from setuptools import setup, find_packages setup( name='bankbarcode', version='0.1.1', packages=find_packages(), url='https://github.com/gisce/bankbarcode', license='GNU Affero General Public License v3', author='GISCE-TI, S.L.', author_email='[email protected]', # We need python-barcode v0.8, to have Code128 (EAN128), not released yet # https://bitbucket.org/whitie/python-barcode/issues/16/pypi-08-release-request dependency_links=[ "https://bitbucket.org/whitie/python-barcode/get/6c22b96a2ca2.zip" ], install_requires=[ 'pybarcode>=0.8b1' ], description='barcodes for financial documents' )
from setuptools import setup, find_packages setup( name='bankbarcode', version='0.1.1', packages=find_packages(), url='https://github.com/gisce/bankbarcode', license='GNU Affero General Public License v3', author='GISCE-TI, S.L.', author_email='[email protected]', # We need python-barcode v0.8, to have Code128 (EAN128), not released yet # https://bitbucket.org/whitie/python-barcode/issues/16/pypi-08-release-request dependency_links=[ "https://bitbucket.org/whitie/python-barcode/get/6c22b96.zip#egg=pybarcode-0.8b1" ], install_requires=[ 'pybarcode>=0.8b1' ], description='barcodes for financial documents' )
Use egg attribute of the links
Use egg attribute of the links To correct install it you must do: $ python setup.py install or $ pip install --process-dependency-links bankbarcode
Python
agpl-3.0
gisce/bankbarcode
python
## Code Before: from setuptools import setup, find_packages setup( name='bankbarcode', version='0.1.1', packages=find_packages(), url='https://github.com/gisce/bankbarcode', license='GNU Affero General Public License v3', author='GISCE-TI, S.L.', author_email='[email protected]', # We need python-barcode v0.8, to have Code128 (EAN128), not released yet # https://bitbucket.org/whitie/python-barcode/issues/16/pypi-08-release-request dependency_links=[ "https://bitbucket.org/whitie/python-barcode/get/6c22b96a2ca2.zip" ], install_requires=[ 'pybarcode>=0.8b1' ], description='barcodes for financial documents' ) ## Instruction: Use egg attribute of the links To correct install it you must do: $ python setup.py install or $ pip install --process-dependency-links bankbarcode ## Code After: from setuptools import setup, find_packages setup( name='bankbarcode', version='0.1.1', packages=find_packages(), url='https://github.com/gisce/bankbarcode', license='GNU Affero General Public License v3', author='GISCE-TI, S.L.', author_email='[email protected]', # We need python-barcode v0.8, to have Code128 (EAN128), not released yet # https://bitbucket.org/whitie/python-barcode/issues/16/pypi-08-release-request dependency_links=[ "https://bitbucket.org/whitie/python-barcode/get/6c22b96.zip#egg=pybarcode-0.8b1" ], install_requires=[ 'pybarcode>=0.8b1' ], description='barcodes for financial documents' )
# ... existing code ... from setuptools import setup, find_packages setup( name='bankbarcode', # ... modified code ... # We need python-barcode v0.8, to have Code128 (EAN128), not released yet # https://bitbucket.org/whitie/python-barcode/issues/16/pypi-08-release-request dependency_links=[ "https://bitbucket.org/whitie/python-barcode/get/6c22b96.zip#egg=pybarcode-0.8b1" ], install_requires=[ 'pybarcode>=0.8b1' # ... rest of the code ...
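A note on the dependency_links change above: the #egg=pybarcode-0.8b1 fragment is what let pip/setuptools of that era tie the raw Bitbucket URL to the 'pybarcode>=0.8b1' entry in install_requires (hence the --process-dependency-links flag in the commit message; that flag was later deprecated and then dropped from pip). A rough sketch of what the fragment encodes, as an illustration only and not pip's actual resolution code:

```python
from urllib.parse import urlparse

link = ("https://bitbucket.org/whitie/python-barcode/get/"
        "6c22b96.zip#egg=pybarcode-0.8b1")

fragment = urlparse(link).fragment      # 'egg=pybarcode-0.8b1'
egg = fragment.split("egg=", 1)[1]      # 'pybarcode-0.8b1'
name, _, version = egg.rpartition("-")  # ('pybarcode', '0.8b1')

# This name/version pair is what allows the link to satisfy 'pybarcode>=0.8b1'.
print(name, version)
```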
091a08a8fe30c3cc00c6b85552e47a1b15b807b8
preferences/views.py
preferences/views.py
from django.shortcuts import render # Create your views here. from registration.views import RegistrationView from registration.forms import RegistrationFormUniqueEmail class EmailRegistrationView(RegistrationView): form_class = RegistrationFormUniqueEmail
from django.shortcuts import render from django.views.generic.edit import FormView from registration.forms import RegistrationFormUniqueEmail from registration.backends.default.views import RegistrationView from preferences.forms import PreferencesForm class EmailRegistrationView(RegistrationView): form_class = RegistrationFormUniqueEmail class UserPreferences(FormView): template_name = 'preferences/preferences.html' form_class = PreferencesForm success_url = '/index/' def form_valid(self, form): return super(UserPreferences, self).form_valid(form)
Add userprefs and email reg view
Add userprefs and email reg view
Python
mit
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
python
## Code Before: from django.shortcuts import render # Create your views here. from registration.views import RegistrationView from registration.forms import RegistrationFormUniqueEmail class EmailRegistrationView(RegistrationView): form_class = RegistrationFormUniqueEmail ## Instruction: Add userprefs and email reg view ## Code After: from django.shortcuts import render from django.views.generic.edit import FormView from registration.forms import RegistrationFormUniqueEmail from registration.backends.default.views import RegistrationView from preferences.forms import PreferencesForm class EmailRegistrationView(RegistrationView): form_class = RegistrationFormUniqueEmail class UserPreferences(FormView): template_name = 'preferences/preferences.html' form_class = PreferencesForm success_url = '/index/' def form_valid(self, form): return super(UserPreferences, self).form_valid(form)
... from django.shortcuts import render from django.views.generic.edit import FormView from registration.forms import RegistrationFormUniqueEmail from registration.backends.default.views import RegistrationView from preferences.forms import PreferencesForm class EmailRegistrationView(RegistrationView): form_class = RegistrationFormUniqueEmail class UserPreferences(FormView): template_name = 'preferences/preferences.html' form_class = PreferencesForm success_url = '/index/' def form_valid(self, form): return super(UserPreferences, self).form_valid(form) ...
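A note on the new views above: neither EmailRegistrationView nor UserPreferences is reachable until the project's URLconf points at them, and UserPreferences presumably also wants a login requirement. A sketch of the wiring; the paths, URL names, and the login_required wrapper are assumptions, not part of the record:

```python
from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from preferences.views import EmailRegistrationView, UserPreferences

urlpatterns = [
    # django-registration's default backend keeps the other /accounts/ routes;
    # only the registration form view is overridden here.
    url(r'^accounts/register/$', EmailRegistrationView.as_view(),
        name='registration_register'),
    url(r'^preferences/$', login_required(UserPreferences.as_view()),
        name='user_preferences'),
]
```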
8c05cb85c47db892dd13abbd91b3948c09b9a954
statsmodels/tools/__init__.py
statsmodels/tools/__init__.py
from tools import add_constant, categorical from datautils import Dataset from statsmodels import NoseWrapper as Tester test = Tester().test
from tools import add_constant, categorical from statsmodels import NoseWrapper as Tester test = Tester().test
Remove import of moved file
REF: Remove import of moved file
Python
bsd-3-clause
josef-pkt/statsmodels,adammenges/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,kiyoto/statsmodels,astocko/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,bzero/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,astocko/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,Averroes/statsmodels,gef756/statsmodels,edhuckle/statsmodels,jseabold/statsmodels,waynenilsen/statsmodels,hainm/statsmodels,bashtage/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,bavardage/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,statsmodels/statsmodels,nvoron23/statsmodels,adammenges/statsmodels,wwf5067/statsmodels,yl565/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,alekz112/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,bavardage/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,bsipocz/statsmodels,waynenilsen/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,bashtage/statsmodels,Averroes/statsmodels,adammenges/statsmodels,hlin117/statsmodels,wwf5067/statsmodels,gef756/statsmodels,cbmoore/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,alekz112/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,wdurhamh/statsmodels,musically-ut/statsmodels,kiyoto/statsmodels,phobson/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,hainm/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,jstoxrocky/statsmodels,bzero/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,huongttlan/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,astocko/statsmodels,wzbozon/statsmodels,bzero/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,yl565/statsmodels,bashtage/statsmodels,jseabold/statsmodels,kiyoto/statsmodels,phobson/statsmodels,bzero/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,DonBeo/statsmodels,yarikoptic/pystatsmodels,nguyentu1602/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,adammenges/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jstoxrocky/statsmodels,yl565/statsmodels,bashtage/statsmodels,astocko/statsmodels,nguyentu1602/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,wzbozon/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,statsmodels/statsmodels,hainm/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wkfwkf/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,detrout/debian-statsmodels,phobson/statsmodels,wdurhamh/statsmodels
python
## Code Before: from tools import add_constant, categorical from datautils import Dataset from statsmodels import NoseWrapper as Tester test = Tester().test ## Instruction: REF: Remove import of moved file ## Code After: from tools import add_constant, categorical from statsmodels import NoseWrapper as Tester test = Tester().test
... from tools import add_constant, categorical from statsmodels import NoseWrapper as Tester test = Tester().test ...
97f25327e506df85649b4b59ff94c625d8cc3e31
xamoom-android-sdk-app/xamoom-android-sdk/src/main/java/com/xamoom/android/mapping/Menu.java
xamoom-android-sdk-app/xamoom-android-sdk/src/main/java/com/xamoom/android/mapping/Menu.java
package com.xamoom.android.mapping; import com.google.gson.annotations.SerializedName; import com.xamoom.android.APICallback; import com.xamoom.android.mapping.ContentBlocks.MenuItem; import java.util.List; /** * Used for mapping menu responses from the xamoom-cloud-api. * Menu will have only a list of MenuItems. * * @author Raphael Seher * * @see MenuItem * @see com.xamoom.android.XamoomEndUserApi#getContentbyIdFull(String, boolean, boolean, String, boolean, APICallback) * @see com.xamoom.android.XamoomEndUserApi#getContentByLocationIdentifier(String, String, boolean, boolean, String, APICallback) */ public class Menu { private List<MenuItem> items; @Override public String toString() { String output = "items: "; for (MenuItem item : items) { output += item.toString(); } return output; } public List<MenuItem> getItems() { return items; } }
package com.xamoom.android.mapping; import com.xamoom.android.APICallback; import java.util.List; /** * Used for mapping menu responses from the xamoom-cloud-api. * Menu will have only a list of MenuItems. * * @author Raphael Seher * * @see MenuItem * @see com.xamoom.android.XamoomEndUserApi#getContentbyId(String, boolean, boolean, String, boolean, boolean, APICallback) * @see com.xamoom.android.XamoomEndUserApi#getContentByLocationIdentifier(String, String, boolean, boolean, String, APICallback) */ public class Menu { private List<MenuItem> items; @Override public String toString() { String output = "items: "; for (MenuItem item : items) { output += item.toString(); } return output; } public List<MenuItem> getItems() { return items; } }
Change category from String to int
Change category from String to int
Java
mit
xamoom/xamoom-android-sdk,xamoom/xamoom-android-sdk
java
## Code Before: package com.xamoom.android.mapping; import com.google.gson.annotations.SerializedName; import com.xamoom.android.APICallback; import com.xamoom.android.mapping.ContentBlocks.MenuItem; import java.util.List; /** * Used for mapping menu responses from the xamoom-cloud-api. * Menu will have only a list of MenuItems. * * @author Raphael Seher * * @see MenuItem * @see com.xamoom.android.XamoomEndUserApi#getContentbyIdFull(String, boolean, boolean, String, boolean, APICallback) * @see com.xamoom.android.XamoomEndUserApi#getContentByLocationIdentifier(String, String, boolean, boolean, String, APICallback) */ public class Menu { private List<MenuItem> items; @Override public String toString() { String output = "items: "; for (MenuItem item : items) { output += item.toString(); } return output; } public List<MenuItem> getItems() { return items; } } ## Instruction: Change category from String to int ## Code After: package com.xamoom.android.mapping; import com.xamoom.android.APICallback; import java.util.List; /** * Used for mapping menu responses from the xamoom-cloud-api. * Menu will have only a list of MenuItems. * * @author Raphael Seher * * @see MenuItem * @see com.xamoom.android.XamoomEndUserApi#getContentbyId(String, boolean, boolean, String, boolean, boolean, APICallback) * @see com.xamoom.android.XamoomEndUserApi#getContentByLocationIdentifier(String, String, boolean, boolean, String, APICallback) */ public class Menu { private List<MenuItem> items; @Override public String toString() { String output = "items: "; for (MenuItem item : items) { output += item.toString(); } return output; } public List<MenuItem> getItems() { return items; } }
// ... existing code ... package com.xamoom.android.mapping; import com.xamoom.android.APICallback; import java.util.List; // ... modified code ... * @author Raphael Seher * * @see MenuItem * @see com.xamoom.android.XamoomEndUserApi#getContentbyId(String, boolean, boolean, String, boolean, boolean, APICallback) * @see com.xamoom.android.XamoomEndUserApi#getContentByLocationIdentifier(String, String, boolean, boolean, String, APICallback) */ public class Menu { // ... rest of the code ...
42a1aaba8daa253b99f444a512f8231db47dfbb2
helpers.py
helpers.py
import array import numpy as np def load_glove_vectors(filename, vocab=None): """ Load glove vectors from a .txt file. Optionally limit the vocabulary to save memory. `vocab` should be a set. """ dct = {} vectors = array.array('d') current_idx = 0 with open(filename, "r", encoding="utf-8") as f: for _, line in enumerate(f): tokens = line.split(" ") word = tokens[0] entries = tokens[1:] if not vocab or word in vocab: dct[word] = current_idx vectors.extend(float(x) for x in entries) current_idx += 1 word_dim = len(entries) num_vectors = len(dct) return [np.array(vectors).reshape(num_vectors, word_dim), dct] def evaluate_recall(y, y_labels, n=1): num_examples = float(len(y)) num_correct = 0 for predictions, label in zip(y, y_labels): if label in predictions[:n]: num_correct += 1 return num_correct/num_examples
import array import numpy as np import pandas as pd def load_glove_vectors(filename, vocab=None): """ Load glove vectors from a .txt file. Optionally limit the vocabulary to save memory. `vocab` should be a set. """ dct = {} vectors = array.array('d') current_idx = 0 with open(filename, "r", encoding="utf-8") as f: for _, line in enumerate(f): tokens = line.split(" ") word = tokens[0] entries = tokens[1:] if not vocab or word in vocab: dct[word] = current_idx vectors.extend(float(x) for x in entries) current_idx += 1 word_dim = len(entries) num_vectors = len(dct) return [np.array(vectors).reshape(num_vectors, word_dim), dct] def evaluate_recall(y, y_labels, n=1): num_examples = float(len(y)) num_correct = 0 for predictions, label in zip(y, y_labels): if label in predictions[:n]: num_correct += 1 return num_correct/num_examples def convert_to_labeled_df(df): """ Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels. This Data Frame can be used to easily get accuarcy values for cross-validation """ result = [] for idx, row in df.iterrows(): context = row.Context result.append([context, row.iloc[1], 1]) for distractor in row.iloc[2:]: result.append([context, distractor, 0]) return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
Add dataset conversion helper function
Add dataset conversion helper function
Python
mit
AotY/chatbot-retrieval,LepiorzDaniel/test2
python
## Code Before: import array import numpy as np def load_glove_vectors(filename, vocab=None): """ Load glove vectors from a .txt file. Optionally limit the vocabulary to save memory. `vocab` should be a set. """ dct = {} vectors = array.array('d') current_idx = 0 with open(filename, "r", encoding="utf-8") as f: for _, line in enumerate(f): tokens = line.split(" ") word = tokens[0] entries = tokens[1:] if not vocab or word in vocab: dct[word] = current_idx vectors.extend(float(x) for x in entries) current_idx += 1 word_dim = len(entries) num_vectors = len(dct) return [np.array(vectors).reshape(num_vectors, word_dim), dct] def evaluate_recall(y, y_labels, n=1): num_examples = float(len(y)) num_correct = 0 for predictions, label in zip(y, y_labels): if label in predictions[:n]: num_correct += 1 return num_correct/num_examples ## Instruction: Add dataset conversion helper function ## Code After: import array import numpy as np import pandas as pd def load_glove_vectors(filename, vocab=None): """ Load glove vectors from a .txt file. Optionally limit the vocabulary to save memory. `vocab` should be a set. """ dct = {} vectors = array.array('d') current_idx = 0 with open(filename, "r", encoding="utf-8") as f: for _, line in enumerate(f): tokens = line.split(" ") word = tokens[0] entries = tokens[1:] if not vocab or word in vocab: dct[word] = current_idx vectors.extend(float(x) for x in entries) current_idx += 1 word_dim = len(entries) num_vectors = len(dct) return [np.array(vectors).reshape(num_vectors, word_dim), dct] def evaluate_recall(y, y_labels, n=1): num_examples = float(len(y)) num_correct = 0 for predictions, label in zip(y, y_labels): if label in predictions[:n]: num_correct += 1 return num_correct/num_examples def convert_to_labeled_df(df): """ Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels. This Data Frame can be used to easily get accuarcy values for cross-validation """ result = [] for idx, row in df.iterrows(): context = row.Context result.append([context, row.iloc[1], 1]) for distractor in row.iloc[2:]: result.append([context, distractor, 0]) return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
// ... existing code ... import array import numpy as np import pandas as pd def load_glove_vectors(filename, vocab=None): // ... modified code ... if label in predictions[:n]: num_correct += 1 return num_correct/num_examples def convert_to_labeled_df(df): """ Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels. This Data Frame can be used to easily get accuarcy values for cross-validation """ result = [] for idx, row in df.iterrows(): context = row.Context result.append([context, row.iloc[1], 1]) for distractor in row.iloc[2:]: result.append([context, distractor, 0]) return pd.DataFrame(result, columns=["Context", "Utterance", "Label"]) // ... rest of the code ...
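A note on convert_to_labeled_df above: it flattens the Ubuntu Dialog test/validation layout (one row holding a context, the ground-truth utterance, then the distractors) into one (Context, Utterance, Label) row per candidate, going by column position rather than name. A toy example with the function above in scope; the input column names are made up:

```python
import pandas as pd

test_df = pd.DataFrame(
    [["how do I update?", "run apt-get upgrade", "try rebooting", "no idea, sorry"]],
    columns=["Context", "Ground Truth Utterance", "Distractor_0", "Distractor_1"],
)

labeled = convert_to_labeled_df(test_df)
print(labeled)
# Expected shape of the result (one labeled row per candidate utterance):
#             Context            Utterance  Label
# 0  how do I update?  run apt-get upgrade      1
# 1  how do I update?        try rebooting      0
# 2  how do I update?       no idea, sorry      0
```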