commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
47593fae71deb378bd60a14d1b6f4a3a2bb98bf6
|
pitz.py
|
pitz.py
|
import sys
import subprocess
cmd = sys.argv[1]
new_args = sys.argv[2:] or []
subprocess.call(["pitz-%s" % cmd] + new_args)
|
import sys
import subprocess
def _help():
subprocess.call(['pitz-help'])
sys.exit(1)
if len(sys.argv) < 2:
_help()
cmd = sys.argv[1]
new_args = sys.argv[2:] or []
try:
subprocess.call(["pitz-%s" % cmd] + new_args)
except OSError as exc:
_help()
|
Add at least a minimal exception handling (missing subcommand).
|
Add at least a minimal exception handling (missing subcommand).
|
Python
|
bsd-3-clause
|
mw44118/pitz,mw44118/pitz,mw44118/pitz
|
python
|
## Code Before:
import sys
import subprocess
cmd = sys.argv[1]
new_args = sys.argv[2:] or []
subprocess.call(["pitz-%s" % cmd] + new_args)
## Instruction:
Add at least a minimal exception handling (missing subcommand).
## Code After:
import sys
import subprocess
def _help():
subprocess.call(['pitz-help'])
sys.exit(1)
if len(sys.argv) < 2:
_help()
cmd = sys.argv[1]
new_args = sys.argv[2:] or []
try:
subprocess.call(["pitz-%s" % cmd] + new_args)
except OSError as exc:
_help()
|
# ... existing code ...
import sys
import subprocess
def _help():
subprocess.call(['pitz-help'])
sys.exit(1)
if len(sys.argv) < 2:
_help()
cmd = sys.argv[1]
new_args = sys.argv[2:] or []
try:
subprocess.call(["pitz-%s" % cmd] + new_args)
except OSError as exc:
_help()
# ... rest of the code ...
|
64c02a8bb7863ee9606b7927540fbf71d806a6e1
|
sitecustomize.py
|
sitecustomize.py
|
import os
import sys
def patch_process_for_coverage():
# patch multiprocessing module to get coverage
# https://bitbucket.org/ned/coveragepy/issue/117/enable-coverage-measurement-of-code-run-by
from coverage.collector import Collector
from coverage import coverage
import multiprocessing
# detect if coverage was running in forked process
if sys.version_info >= (3, 4):
klass = multiprocessing.process.BaseProcess
else:
klass = multiprocessing.Process
if Collector._collectors:
original = multiprocessing.Process._bootstrap
class ProcessWithCoverage(multiprocessing.Process):
def _bootstrap(self):
cov = coverage(
data_suffix=True,
config_file=os.getenv('COVERAGE_PROCESS_START', True)
)
cov.start()
try:
return original(self)
finally:
cov.stop()
cov.save()
if sys.version_info >= (3, 4):
klass._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
if os.getenv('FULL_COVERAGE', 'false') == 'true':
try:
import coverage
coverage.process_startup()
patch_process_for_coverage()
except ImportError:
pass
|
import os
import sys
def patch_process_for_coverage():
# patch multiprocessing module to get coverage
# https://bitbucket.org/ned/coveragepy/issue/117/enable-coverage-measurement-of-code-run-by
from coverage.collector import Collector
from coverage import coverage
import multiprocessing
# detect if coverage was running in forked process
if sys.version_info >= (3, 4):
klass = multiprocessing.process.BaseProcess
else:
klass = multiprocessing.Process
if Collector._collectors:
original = multiprocessing.Process._bootstrap
class ProcessWithCoverage(multiprocessing.Process):
def _bootstrap(self):
cov = coverage(
data_suffix=True,
config_file=os.getenv('COVERAGE_PROCESS_START', True)
)
cov.start()
try:
return original(self)
finally:
cov.stop()
cov.save()
if sys.version_info >= (3, 4):
klass._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
if os.getenv('FULL_COVERAGE', 'false') == 'true':
try:
import coverage
coverage.process_startup()
patch_process_for_coverage()
except ImportError:
pass
|
Fix scope when setting up multiprocessing with coverage
|
Fix scope when setting up multiprocessing with coverage
|
Python
|
apache-2.0
|
Wattpad/luigi,jamesmcm/luigi,wakamori/luigi,casey-green/luigi,mbruggmann/luigi,adaitche/luigi,rayrrr/luigi,stroykova/luigi,riga/luigi,fabriziodemaria/luigi,mfcabrera/luigi,mfcabrera/luigi,Houzz/luigi,h3biomed/luigi,linsomniac/luigi,soxofaan/luigi,oldpa/luigi,edx/luigi,jw0201/luigi,foursquare/luigi,humanlongevity/luigi,Tarrasch/luigi,edx/luigi,samepage-labs/luigi,dstandish/luigi,Tarrasch/luigi,rayrrr/luigi,jw0201/luigi,bmaggard/luigi,spotify/luigi,mbruggmann/luigi,stroykova/luigi,fabriziodemaria/luigi,jw0201/luigi,bmaggard/luigi,mfcabrera/luigi,foursquare/luigi,samuell/luigi,ehdr/luigi,jamesmcm/luigi,rizzatti/luigi,javrasya/luigi,dstandish/luigi,Houzz/luigi,ivannotes/luigi,Houzz/luigi,ivannotes/luigi,Tarrasch/luigi,republic-analytics/luigi,mbruggmann/luigi,lungetech/luigi,riga/luigi,rizzatti/luigi,lungetech/luigi,mbruggmann/luigi,ContextLogic/luigi,stroykova/luigi,oldpa/luigi,soxofaan/luigi,bmaggard/luigi,spotify/luigi,h3biomed/luigi,samepage-labs/luigi,casey-green/luigi,ContextLogic/luigi,fabriziodemaria/luigi,lungetech/luigi,Magnetic/luigi,Wattpad/luigi,jamesmcm/luigi,rayrrr/luigi,rizzatti/luigi,rayrrr/luigi,stroykova/luigi,PeteW/luigi,oldpa/luigi,dstandish/luigi,bmaggard/luigi,soxofaan/luigi,mfcabrera/luigi,wakamori/luigi,PeteW/luigi,thejens/luigi,javrasya/luigi,ContextLogic/luigi,jw0201/luigi,h3biomed/luigi,wakamori/luigi,republic-analytics/luigi,spotify/luigi,jamesmcm/luigi,republic-analytics/luigi,ContextLogic/luigi,humanlongevity/luigi,ivannotes/luigi,ehdr/luigi,republic-analytics/luigi,PeteW/luigi,fabriziodemaria/luigi,thejens/luigi,thejens/luigi,ehdr/luigi,ivannotes/luigi,PeteW/luigi,dlstadther/luigi,edx/luigi,linsomniac/luigi,adaitche/luigi,soxofaan/luigi,wakamori/luigi,linsomniac/luigi,javrasya/luigi,humanlongevity/luigi,javrasya/luigi,dlstadther/luigi,oldpa/luigi,rizzatti/luigi,linsomniac/luigi,Houzz/luigi,riga/luigi,samuell/luigi,spotify/luigi,dstandish/luigi,casey-green/luigi,adaitche/luigi,samepage-labs/luigi,ehdr/luigi,foursquare/luigi,foursquare/luig
i,samuell/luigi,Tarrasch/luigi,adaitche/luigi,edx/luigi,samepage-labs/luigi,Wattpad/luigi,Magnetic/luigi,riga/luigi,samuell/luigi,Magnetic/luigi,Magnetic/luigi,lungetech/luigi,casey-green/luigi,dlstadther/luigi,h3biomed/luigi,humanlongevity/luigi,dlstadther/luigi,thejens/luigi
|
python
|
## Code Before:
import os
import sys
def patch_process_for_coverage():
# patch multiprocessing module to get coverage
# https://bitbucket.org/ned/coveragepy/issue/117/enable-coverage-measurement-of-code-run-by
from coverage.collector import Collector
from coverage import coverage
import multiprocessing
# detect if coverage was running in forked process
if sys.version_info >= (3, 4):
klass = multiprocessing.process.BaseProcess
else:
klass = multiprocessing.Process
if Collector._collectors:
original = multiprocessing.Process._bootstrap
class ProcessWithCoverage(multiprocessing.Process):
def _bootstrap(self):
cov = coverage(
data_suffix=True,
config_file=os.getenv('COVERAGE_PROCESS_START', True)
)
cov.start()
try:
return original(self)
finally:
cov.stop()
cov.save()
if sys.version_info >= (3, 4):
klass._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
if os.getenv('FULL_COVERAGE', 'false') == 'true':
try:
import coverage
coverage.process_startup()
patch_process_for_coverage()
except ImportError:
pass
## Instruction:
Fix scope when setting up multiprocessing with coverage
## Code After:
import os
import sys
def patch_process_for_coverage():
# patch multiprocessing module to get coverage
# https://bitbucket.org/ned/coveragepy/issue/117/enable-coverage-measurement-of-code-run-by
from coverage.collector import Collector
from coverage import coverage
import multiprocessing
# detect if coverage was running in forked process
if sys.version_info >= (3, 4):
klass = multiprocessing.process.BaseProcess
else:
klass = multiprocessing.Process
if Collector._collectors:
original = multiprocessing.Process._bootstrap
class ProcessWithCoverage(multiprocessing.Process):
def _bootstrap(self):
cov = coverage(
data_suffix=True,
config_file=os.getenv('COVERAGE_PROCESS_START', True)
)
cov.start()
try:
return original(self)
finally:
cov.stop()
cov.save()
if sys.version_info >= (3, 4):
klass._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
if os.getenv('FULL_COVERAGE', 'false') == 'true':
try:
import coverage
coverage.process_startup()
patch_process_for_coverage()
except ImportError:
pass
|
...
cov.stop()
cov.save()
if sys.version_info >= (3, 4):
klass._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
if os.getenv('FULL_COVERAGE', 'false') == 'true':
...
|
7662ebe6de73bbada1f216590f3e8b15b496a46b
|
Src/lib/utils/swap.c
|
Src/lib/utils/swap.c
|
/* Swap generic function */
void swap(void *vp1, void *vp2, int size)
{
char buffer[size];
memcpy(buffer, vp1, size);
memcpy(vp1,vp2,size);
memcpy(vp2,buffer,size);
}
|
/* Swap generic function */
void swap(void *vp1, void *vp2, int size)
{
char* buffer = (char*) malloc(size*sizeof(char));
memcpy(buffer, vp1, size);
memcpy(vp1,vp2,size);
memcpy(vp2,buffer,size);
free(buffer);
}
|
Use malloc/free as compile failed under MSVC. Ask rpmuller to check if okay.
|
Use malloc/free as compile failed under MSVC. Ask rpmuller to check if okay.
git-svn-id: 6e15fd90c9d760a7473dfa00402abf17076c345c@188 64417113-1622-0410-aef8-ef15d1a3721e
|
C
|
bsd-3-clause
|
berquist/PyQuante,berquist/PyQuante,berquist/PyQuante,berquist/PyQuante,berquist/PyQuante,berquist/PyQuante
|
c
|
## Code Before:
/* Swap generic function */
void swap(void *vp1, void *vp2, int size)
{
char buffer[size];
memcpy(buffer, vp1, size);
memcpy(vp1,vp2,size);
memcpy(vp2,buffer,size);
}
## Instruction:
Use malloc/free as compile failed under MSVC. Ask rpmuller to check if okay.
git-svn-id: 6e15fd90c9d760a7473dfa00402abf17076c345c@188 64417113-1622-0410-aef8-ef15d1a3721e
## Code After:
/* Swap generic function */
void swap(void *vp1, void *vp2, int size)
{
char* buffer = (char*) malloc(size*sizeof(char));
memcpy(buffer, vp1, size);
memcpy(vp1,vp2,size);
memcpy(vp2,buffer,size);
free(buffer);
}
|
# ... existing code ...
/* Swap generic function */
void swap(void *vp1, void *vp2, int size)
{
char* buffer = (char*) malloc(size*sizeof(char));
memcpy(buffer, vp1, size);
memcpy(vp1,vp2,size);
memcpy(vp2,buffer,size);
free(buffer);
}
# ... rest of the code ...
|
832fecfe5bfc8951c0d302c2f913a81acfbc657c
|
solarnmf_main_ts.py
|
solarnmf_main_ts.py
|
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
|
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
|
Fix for input options in make_t_matrix function
|
Fix for input options in make_t_matrix function
|
Python
|
mit
|
wtbarnes/solarnmf
|
python
|
## Code Before:
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
## Instruction:
Fix for input options in make_t_matrix function
## Code After:
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
|
// ... existing code ...
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
// ... modified code ...
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
// ... rest of the code ...
|
db76777575162aab69aec9429455f2e7d841a605
|
lambdas/dynamo_to_sns/dynamo_to_sns.py
|
lambdas/dynamo_to_sns/dynamo_to_sns.py
|
import json
import os
from sns_utils import publish_sns_message
def main(event, _):
print(f'Received event:\n{event}')
stream_topic_map = json.loads(os.environ["STREAM_TOPIC_MAP"])
new_image = event['Records'][0]['dynamodb']['NewImage']
topic_arn = stream_topic_map[event['Records'][0]['eventSourceARN']]
publish_sns_message(topic_arn,new_image)
|
import os
from sns_utils import publish_sns_message
def main(event, _):
print(f'Received event:\n{event}')
stream_topic_map = os.environ["STREAM_TOPIC_MAP"]
new_image = event['Records'][0]['dynamodb']['NewImage']
topic_arn = stream_topic_map[event['Records'][0]['eventSourceARN']]
publish_sns_message(topic_arn,new_image)
|
Fix loading of map from environment variables
|
Fix loading of map from environment variables
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
python
|
## Code Before:
import json
import os
from sns_utils import publish_sns_message
def main(event, _):
print(f'Received event:\n{event}')
stream_topic_map = json.loads(os.environ["STREAM_TOPIC_MAP"])
new_image = event['Records'][0]['dynamodb']['NewImage']
topic_arn = stream_topic_map[event['Records'][0]['eventSourceARN']]
publish_sns_message(topic_arn,new_image)
## Instruction:
Fix loading of map from environment variables
## Code After:
import os
from sns_utils import publish_sns_message
def main(event, _):
print(f'Received event:\n{event}')
stream_topic_map = os.environ["STREAM_TOPIC_MAP"]
new_image = event['Records'][0]['dynamodb']['NewImage']
topic_arn = stream_topic_map[event['Records'][0]['eventSourceARN']]
publish_sns_message(topic_arn,new_image)
|
...
import os
from sns_utils import publish_sns_message
...
def main(event, _):
print(f'Received event:\n{event}')
stream_topic_map = os.environ["STREAM_TOPIC_MAP"]
new_image = event['Records'][0]['dynamodb']['NewImage']
topic_arn = stream_topic_map[event['Records'][0]['eventSourceARN']]
publish_sns_message(topic_arn,new_image)
...
|
72e5c6bb8c0df5b6b0812155474065b30f9e95e2
|
firmware/BlynkWidgets.h
|
firmware/BlynkWidgets.h
|
/**
* @file BlynkWidgets.h
* @author Volodymyr Shymanskyy
* @license This project is released under the MIT License (MIT)
* @copyright Copyright (c) 2015 Volodymyr Shymanskyy
* @date Mar 2015
* @brief
*/
#include "WidgetLED.h"
#include "WidgetLCD.h"
//#include <WidgetTerminal.h> // No Print.h on Spark?
#include "WidgetBridge.h"
// Cannot auto-include WidgetSD, as it has library dependency
|
/**
* @file BlynkWidgets.h
* @author Volodymyr Shymanskyy
* @license This project is released under the MIT License (MIT)
* @copyright Copyright (c) 2015 Volodymyr Shymanskyy
* @date Mar 2015
* @brief
*/
#include "WidgetLED.h"
#include "WidgetLCD.h"
#include "WidgetTerminal.h"
#include "WidgetBridge.h"
// Cannot auto-include WidgetSD, as it has library dependency
|
Add terminal to common widgets
|
Add terminal to common widgets
|
C
|
mit
|
vshymanskyy/blynk-library-spark,domo-connect/blynk-library-spark,yaneexy/blynk-library-spark,vshymanskyy/blynk-library-spark,domo-connect/blynk-library-spark,yaneexy/blynk-library-spark,chieftuscan/blynk-library-spark,chieftuscan/blynk-library-spark
|
c
|
## Code Before:
/**
* @file BlynkWidgets.h
* @author Volodymyr Shymanskyy
* @license This project is released under the MIT License (MIT)
* @copyright Copyright (c) 2015 Volodymyr Shymanskyy
* @date Mar 2015
* @brief
*/
#include "WidgetLED.h"
#include "WidgetLCD.h"
//#include <WidgetTerminal.h> // No Print.h on Spark?
#include "WidgetBridge.h"
// Cannot auto-include WidgetSD, as it has library dependency
## Instruction:
Add terminal to common widgets
## Code After:
/**
* @file BlynkWidgets.h
* @author Volodymyr Shymanskyy
* @license This project is released under the MIT License (MIT)
* @copyright Copyright (c) 2015 Volodymyr Shymanskyy
* @date Mar 2015
* @brief
*/
#include "WidgetLED.h"
#include "WidgetLCD.h"
#include "WidgetTerminal.h"
#include "WidgetBridge.h"
// Cannot auto-include WidgetSD, as it has library dependency
|
# ... existing code ...
#include "WidgetLED.h"
#include "WidgetLCD.h"
#include "WidgetTerminal.h"
#include "WidgetBridge.h"
// Cannot auto-include WidgetSD, as it has library dependency
# ... rest of the code ...
|
e58647aaf0b3f2e9ebfdcc98206edd3e23680365
|
flint/src/jp/oist/flint/rpc/Server.java
|
flint/src/jp/oist/flint/rpc/Server.java
|
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.rpc;
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.BindException;
import java.net.InetSocketAddress;
public class Server {
static final int PORT = 20465;
private final HttpServer mHttpServer;
public Server(ICallee callee) throws BindException, IOException {
InetSocketAddress isa = new InetSocketAddress(PORT);
mHttpServer = HttpServer.create(isa, 0); // use the default value for the socket backlog
mHttpServer.createContext("/open-model", new OpenModelRequestHandler(callee));
mHttpServer.createContext("/run", new RunRequestHandler(callee));
}
public void start() {
mHttpServer.setExecutor(null); // create a default executor
mHttpServer.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
mHttpServer.stop(0);
}
});
}
}
|
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.rpc;
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
public class Server {
static final int PORT = 20465;
private final HttpServer mHttpServer;
public Server(ICallee callee) throws BindException, IOException {
InetSocketAddress isa = new InetSocketAddress(InetAddress.getLoopbackAddress(), PORT);
mHttpServer = HttpServer.create(isa, 0); // use the default value for the socket backlog
mHttpServer.createContext("/open-model", new OpenModelRequestHandler(callee));
mHttpServer.createContext("/run", new RunRequestHandler(callee));
}
public void start() {
mHttpServer.setExecutor(null); // create a default executor
mHttpServer.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
mHttpServer.stop(0);
}
});
}
}
|
Allow connection from localhost only
|
Allow connection from localhost only
|
Java
|
mit
|
flintproject/Flint,flintproject/Flint
|
java
|
## Code Before:
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.rpc;
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.BindException;
import java.net.InetSocketAddress;
public class Server {
static final int PORT = 20465;
private final HttpServer mHttpServer;
public Server(ICallee callee) throws BindException, IOException {
InetSocketAddress isa = new InetSocketAddress(PORT);
mHttpServer = HttpServer.create(isa, 0); // use the default value for the socket backlog
mHttpServer.createContext("/open-model", new OpenModelRequestHandler(callee));
mHttpServer.createContext("/run", new RunRequestHandler(callee));
}
public void start() {
mHttpServer.setExecutor(null); // create a default executor
mHttpServer.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
mHttpServer.stop(0);
}
});
}
}
## Instruction:
Allow connection from localhost only
## Code After:
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.rpc;
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
public class Server {
static final int PORT = 20465;
private final HttpServer mHttpServer;
public Server(ICallee callee) throws BindException, IOException {
InetSocketAddress isa = new InetSocketAddress(InetAddress.getLoopbackAddress(), PORT);
mHttpServer = HttpServer.create(isa, 0); // use the default value for the socket backlog
mHttpServer.createContext("/open-model", new OpenModelRequestHandler(callee));
mHttpServer.createContext("/run", new RunRequestHandler(callee));
}
public void start() {
mHttpServer.setExecutor(null); // create a default executor
mHttpServer.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
mHttpServer.stop(0);
}
});
}
}
|
// ... existing code ...
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
public class Server {
// ... modified code ...
private final HttpServer mHttpServer;
public Server(ICallee callee) throws BindException, IOException {
InetSocketAddress isa = new InetSocketAddress(InetAddress.getLoopbackAddress(), PORT);
mHttpServer = HttpServer.create(isa, 0); // use the default value for the socket backlog
mHttpServer.createContext("/open-model", new OpenModelRequestHandler(callee));
mHttpServer.createContext("/run", new RunRequestHandler(callee));
// ... rest of the code ...
|
56aa7fa21b218e047e9f3d7c2239aa6a22d9a5b1
|
kombu/__init__.py
|
kombu/__init__.py
|
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "[email protected]"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext"
import os
if not os.environ.get("KOMBU_NO_EVAL", False):
from kombu.connection import BrokerConnection
from kombu.entity import Exchange, Queue
from kombu.messaging import Consumer, Producer
|
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "[email protected]"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext en"
import os
import sys
if not os.environ.get("KOMBU_NO_EVAL", False):
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType
all_by_module = {
"kombu.connection": ["BrokerConnection"],
"kombu.entity": ["Exchange", "Queue"],
"kombu.messaging": ["Consumer", "Producer"],
}
object_origins = {}
for module, items in all_by_module.iteritems():
for item in items:
object_origins[item] = module
class module(ModuleType):
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(("__file__", "__path__", "__doc__", "__all__",
"__docformat__", "__name__", "__path__", "VERSION",
"__package__", "__version__", "__author__",
"__contact__", "__homepage__", "__docformat__"))
return result
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
"__file__": __file__,
"__path__": __path__,
"__doc__": __doc__,
"__all__": tuple(object_origins),
"__version__": __version__,
"__author__": __author__,
"__contact__": __contact__,
"__homepage__": __homepage__,
"__docformat__": __docformat__,
"VERSION": VERSION})
|
Load kombu root module lazily
|
Load kombu root module lazily
|
Python
|
bsd-3-clause
|
urbn/kombu,depop/kombu,bmbouter/kombu,WoLpH/kombu,ZoranPavlovic/kombu,depop/kombu,mathom/kombu,xujun10110/kombu,romank0/kombu,xujun10110/kombu,alex/kombu,numb3r3/kombu,alex/kombu,andresriancho/kombu,daevaorn/kombu,daevaorn/kombu,iris-edu-int/kombu,ZoranPavlovic/kombu,WoLpH/kombu,cce/kombu,mverrilli/kombu,disqus/kombu,cce/kombu,Elastica/kombu,numb3r3/kombu,Elastica/kombu,pantheon-systems/kombu,tkanemoto/kombu,romank0/kombu,bmbouter/kombu,iris-edu-int/kombu,disqus/kombu,andresriancho/kombu,jindongh/kombu,celery/kombu,tkanemoto/kombu,mathom/kombu,pantheon-systems/kombu,mverrilli/kombu,jindongh/kombu
|
python
|
## Code Before:
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "[email protected]"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext"
import os
if not os.environ.get("KOMBU_NO_EVAL", False):
from kombu.connection import BrokerConnection
from kombu.entity import Exchange, Queue
from kombu.messaging import Consumer, Producer
## Instruction:
Load kombu root module lazily
## Code After:
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "[email protected]"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext en"
import os
import sys
if not os.environ.get("KOMBU_NO_EVAL", False):
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType
all_by_module = {
"kombu.connection": ["BrokerConnection"],
"kombu.entity": ["Exchange", "Queue"],
"kombu.messaging": ["Consumer", "Producer"],
}
object_origins = {}
for module, items in all_by_module.iteritems():
for item in items:
object_origins[item] = module
class module(ModuleType):
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(("__file__", "__path__", "__doc__", "__all__",
"__docformat__", "__name__", "__path__", "VERSION",
"__package__", "__version__", "__author__",
"__contact__", "__homepage__", "__docformat__"))
return result
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
"__file__": __file__,
"__path__": __path__,
"__doc__": __doc__,
"__all__": tuple(object_origins),
"__version__": __version__,
"__author__": __author__,
"__contact__": __contact__,
"__homepage__": __homepage__,
"__docformat__": __docformat__,
"VERSION": VERSION})
|
# ... existing code ...
__author__ = "Ask Solem"
__contact__ = "[email protected]"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext en"
import os
import sys
if not os.environ.get("KOMBU_NO_EVAL", False):
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType
all_by_module = {
"kombu.connection": ["BrokerConnection"],
"kombu.entity": ["Exchange", "Queue"],
"kombu.messaging": ["Consumer", "Producer"],
}
object_origins = {}
for module, items in all_by_module.iteritems():
for item in items:
object_origins[item] = module
class module(ModuleType):
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(("__file__", "__path__", "__doc__", "__all__",
"__docformat__", "__name__", "__path__", "VERSION",
"__package__", "__version__", "__author__",
"__contact__", "__homepage__", "__docformat__"))
return result
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
"__file__": __file__,
"__path__": __path__,
"__doc__": __doc__,
"__all__": tuple(object_origins),
"__version__": __version__,
"__author__": __author__,
"__contact__": __contact__,
"__homepage__": __homepage__,
"__docformat__": __docformat__,
"VERSION": VERSION})
# ... rest of the code ...
|
17a18f72e9e2a7df43d2dafe77a17bfe4777d7aa
|
avena/image.py
|
avena/image.py
|
'''Read and write image files as NumPy arrays'''
from numpy import asarray, float32
from PIL import Image
from . import np
from . import utils
_DEFAULT_DTYPE = float32
_PIL_RGB = {
'R': 0,
'G': 1,
'B': 2,
}
def get_channels(img):
'''Return a list of channels of an image array.'''
if utils.depth(img) == 1:
yield img
else:
for i in xrange(utils.depth(img)):
yield img[:, :, i]
def read(filename, dtype=_DEFAULT_DTYPE):
'''Read an image file as an array.'''
img = Image.open(filename)
arr = asarray(img, dtype=dtype)
arr = utils.swap_rgb(arr, _PIL_RGB)
return arr
def _pil_save(img, filename):
pil_img = Image.fromarray(img)
pil_img.save(filename)
return
def save(img, filename, random=False):
'''Save an image array and return its path.'''
if random:
newfile = utils.rand_filename(filename)
else:
newfile = filename
np.normalize(img)
uint8img = np.to_uint8(img)
_pil_save(uint8img, newfile)
return newfile
if __name__ == '__main__':
pass
|
'''Read and write image files as NumPy arrays'''
from numpy import asarray, float32
from PIL import Image
from . import np
from . import utils
_DEFAULT_DTYPE = float32
_PIL_RGB = {
'R': 0,
'G': 1,
'B': 2,
}
def get_channels(img):
'''Return a list of channels of an image array.'''
if utils.depth(img) == 1:
yield img
else:
for i in xrange(utils.depth(img)):
yield img[:, :, i]
def read(filename, dtype=_DEFAULT_DTYPE):
'''Read an image file as an array.'''
img = Image.open(filename)
arr = asarray(img, dtype=dtype)
arr = utils.swap_rgb(arr, _PIL_RGB)
return arr
def _pil_save(img, filename):
pil_img = Image.fromarray(img)
pil_img.save(filename)
return
def save(img, filename, random=False, ext=None):
'''Save an image array and return its path.'''
if random:
newfile = utils.rand_filename(filename, ext=ext)
else:
newfile = filename
np.normalize(img)
uint8img = np.to_uint8(img)
_pil_save(uint8img, newfile)
return newfile
if __name__ == '__main__':
pass
|
Add an extension parameter to the save function.
|
Add an extension parameter to the save function.
|
Python
|
isc
|
eliteraspberries/avena
|
python
|
## Code Before:
'''Read and write image files as NumPy arrays'''
from numpy import asarray, float32
from PIL import Image
from . import np
from . import utils
_DEFAULT_DTYPE = float32
_PIL_RGB = {
'R': 0,
'G': 1,
'B': 2,
}
def get_channels(img):
'''Return a list of channels of an image array.'''
if utils.depth(img) == 1:
yield img
else:
for i in xrange(utils.depth(img)):
yield img[:, :, i]
def read(filename, dtype=_DEFAULT_DTYPE):
'''Read an image file as an array.'''
img = Image.open(filename)
arr = asarray(img, dtype=dtype)
arr = utils.swap_rgb(arr, _PIL_RGB)
return arr
def _pil_save(img, filename):
pil_img = Image.fromarray(img)
pil_img.save(filename)
return
def save(img, filename, random=False):
'''Save an image array and return its path.'''
if random:
newfile = utils.rand_filename(filename)
else:
newfile = filename
np.normalize(img)
uint8img = np.to_uint8(img)
_pil_save(uint8img, newfile)
return newfile
if __name__ == '__main__':
pass
## Instruction:
Add an extension parameter to the save function.
## Code After:
'''Read and write image files as NumPy arrays'''
from numpy import asarray, float32
from PIL import Image
from . import np
from . import utils
_DEFAULT_DTYPE = float32
_PIL_RGB = {
'R': 0,
'G': 1,
'B': 2,
}
def get_channels(img):
'''Return a list of channels of an image array.'''
if utils.depth(img) == 1:
yield img
else:
for i in xrange(utils.depth(img)):
yield img[:, :, i]
def read(filename, dtype=_DEFAULT_DTYPE):
'''Read an image file as an array.'''
img = Image.open(filename)
arr = asarray(img, dtype=dtype)
arr = utils.swap_rgb(arr, _PIL_RGB)
return arr
def _pil_save(img, filename):
pil_img = Image.fromarray(img)
pil_img.save(filename)
return
def save(img, filename, random=False, ext=None):
'''Save an image array and return its path.'''
if random:
newfile = utils.rand_filename(filename, ext=ext)
else:
newfile = filename
np.normalize(img)
uint8img = np.to_uint8(img)
_pil_save(uint8img, newfile)
return newfile
if __name__ == '__main__':
pass
|
...
return
def save(img, filename, random=False, ext=None):
'''Save an image array and return its path.'''
if random:
newfile = utils.rand_filename(filename, ext=ext)
else:
newfile = filename
np.normalize(img)
...
|
982f4af638e83ee49c87a0dffad2b47daf872749
|
workers/data_refinery_workers/downloaders/test_utils.py
|
workers/data_refinery_workers/downloaders/test_utils.py
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
Add tag to downloaders test so it is actually run.
|
Add tag to downloaders test so it is actually run.
|
Python
|
bsd-3-clause
|
data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery
|
python
|
## Code Before:
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
## Instruction:
Add tag to downloaders test so it is actually run.
## Code After:
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
# ... existing code ...
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
# ... rest of the code ...
|
cbbf178a59561e828214ff88e0c73ec0716fa926
|
tests/test_ensure_do_cleanups.py
|
tests/test_ensure_do_cleanups.py
|
from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
|
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
Test implicit `doCleanups` on tearDown
|
Test implicit `doCleanups` on tearDown
|
Python
|
mit
|
randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting
|
python
|
## Code Before:
from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
## Instruction:
Test implicit `doCleanups` on tearDown
## Code After:
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
...
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
...
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
...
|
d9d051b7a80025d76cfe0827f0bf632cfbd18972
|
app/handlers.py
|
app/handlers.py
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
Remove extra check of symlinks.
|
Remove extra check of symlinks.
|
Python
|
apache-2.0
|
pcinkh/fake-useragent-cache-server
|
python
|
## Code Before:
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path) or os.path.islink(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
## Instruction:
Remove extra check of symlinks.
## Code After:
import os
import io
import json
from aiohttp import web
class Handler:
def __init__(self, *, loop):
self.loop = loop
self.files = {}
def lookup_files(self, path):
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
self.files[name] = json.dumps(json.load(fp)).encode('utf-8') # noqa
def browsers(self, request):
version = request.match_info['version']
if version not in self.files:
raise web.HTTPNotFound(
text='No data was found for version {version}'.format(
version=version,
),
)
return web.json_response(body=self.files[version])
|
# ... existing code ...
for obj in os.listdir(path):
_path = os.path.join(path, obj)
if os.path.isfile(_path):
name, _ = os.path.splitext(obj)
with io.open(_path, mode='rt', encoding='utf-8') as fp:
# ... rest of the code ...
|
99be36b77741a9b2e3d330eb89e0e381b3a3081f
|
api.py
|
api.py
|
import json
from os import environ
from eve import Eve
from eve.io.mongo import Validator
from settings import API_NAME, URL_PREFIX
class KeySchemaValidator(Validator):
def _validate_keyschema(self, schema, field, dct):
"Validate all keys of dictionary `dct` against schema `schema`."
for key, value in dct.items():
self._validate_schema(schema, key, value)
api = Eve(API_NAME, validator=KeySchemaValidator)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
|
import json
from os import environ
from eve import Eve
from eve.io.mongo import Validator
from settings import API_NAME, URL_PREFIX
class KeySchemaValidator(Validator):
def _validate_keyschema(self, schema, field, dct):
"Validate all keys of dictionary `dct` against schema `schema`."
for key, value in dct.items():
self._validate_schema(schema, key, value)
api = Eve(API_NAME, validator=KeySchemaValidator)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
def delete_resource(resource):
"Delete all documents of the given resource."
return api.test_client().delete('/' + URL_PREFIX + '/' + resource)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
|
Add utility method to delete all documents of given resource
|
Add utility method to delete all documents of given resource
|
Python
|
apache-2.0
|
gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa
|
python
|
## Code Before:
import json
from os import environ
from eve import Eve
from eve.io.mongo import Validator
from settings import API_NAME, URL_PREFIX
class KeySchemaValidator(Validator):
def _validate_keyschema(self, schema, field, dct):
"Validate all keys of dictionary `dct` against schema `schema`."
for key, value in dct.items():
self._validate_schema(schema, key, value)
api = Eve(API_NAME, validator=KeySchemaValidator)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
## Instruction:
Add utility method to delete all documents of given resource
## Code After:
import json
from os import environ
from eve import Eve
from eve.io.mongo import Validator
from settings import API_NAME, URL_PREFIX
class KeySchemaValidator(Validator):
def _validate_keyschema(self, schema, field, dct):
"Validate all keys of dictionary `dct` against schema `schema`."
for key, value in dct.items():
self._validate_schema(schema, key, value)
api = Eve(API_NAME, validator=KeySchemaValidator)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
def delete_resource(resource):
"Delete all documents of the given resource."
return api.test_client().delete('/' + URL_PREFIX + '/' + resource)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
|
// ... existing code ...
data=json.dumps(document),
content_type='application/json')
def delete_resource(resource):
"Delete all documents of the given resource."
return api.test_client().delete('/' + URL_PREFIX + '/' + resource)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
// ... rest of the code ...
|
402b619aaf327932705fe8476d01af6d693a0ac9
|
src/main/kotlin/de/axelrindle/broadcaster/model/Message.kt
|
src/main/kotlin/de/axelrindle/broadcaster/model/Message.kt
|
package de.axelrindle.broadcaster.model
import net.md_5.bungee.api.chat.BaseComponent
import net.md_5.bungee.chat.ComponentSerializer
/**
* A `Message` describes a piece of information that will be broadcasted around the server.
*/
abstract class Message
/**
* @param input The input text used for configuration.
*/
(protected val input: String)
/**
* A `SimpleMessage` is just a container for holding a plain text message.
*/
class SimpleMessage(input: String) : Message(input) {
/**
* Returns just the input string to use as the message text.
*
* @return The [input].
*/
fun getText(): String = input
}
/**
* A `JsonMessage` is created from a json string and converted into an [Array] of [BaseComponent]s.
*
* @see ComponentSerializer.parse
* @see BaseComponent
*/
class JsonMessage(input: String) : Message(input) {
val components: Array<BaseComponent> = ComponentSerializer.parse(input)
}
|
package de.axelrindle.broadcaster.model
import net.md_5.bungee.api.chat.BaseComponent
import net.md_5.bungee.chat.ComponentSerializer
/**
* A `Message` describes a piece of information that will be broadcasted around the server.
*/
abstract class Message
/**
* @param input The input text used for configuration.
*/
(protected val input: String)
/**
* A `SimpleMessage` is just a container for holding a plain text message.
*/
class SimpleMessage(input: String) : Message(input) {
/**
* Returns just the input string to use as the message text.
*
* @return The [input].
*/
fun getText(): String = input
}
/**
* A `JsonMessage` is created from a json string and converted into an [Array] of [BaseComponent]s.
*
* @see ComponentSerializer.parse
* @see BaseComponent
*/
class JsonMessage(input: String) : Message(input) {
// test for class dependency
init {
Class.forName("net.md_5.bungee.chat.ComponentSerializer")
}
val components: Array<BaseComponent> = ComponentSerializer.parse(input)
}
|
Test whether required class exists
|
Test whether required class exists
|
Kotlin
|
mit
|
axelrindle/Broadcaster-Plugin
|
kotlin
|
## Code Before:
package de.axelrindle.broadcaster.model
import net.md_5.bungee.api.chat.BaseComponent
import net.md_5.bungee.chat.ComponentSerializer
/**
* A `Message` describes a piece of information that will be broadcasted around the server.
*/
abstract class Message
/**
* @param input The input text used for configuration.
*/
(protected val input: String)
/**
* A `SimpleMessage` is just a container for holding a plain text message.
*/
class SimpleMessage(input: String) : Message(input) {
/**
* Returns just the input string to use as the message text.
*
* @return The [input].
*/
fun getText(): String = input
}
/**
* A `JsonMessage` is created from a json string and converted into an [Array] of [BaseComponent]s.
*
* @see ComponentSerializer.parse
* @see BaseComponent
*/
class JsonMessage(input: String) : Message(input) {
val components: Array<BaseComponent> = ComponentSerializer.parse(input)
}
## Instruction:
Test whether required class exists
## Code After:
package de.axelrindle.broadcaster.model
import net.md_5.bungee.api.chat.BaseComponent
import net.md_5.bungee.chat.ComponentSerializer
/**
* A `Message` describes a piece of information that will be broadcasted around the server.
*/
abstract class Message
/**
* @param input The input text used for configuration.
*/
(protected val input: String)
/**
* A `SimpleMessage` is just a container for holding a plain text message.
*/
class SimpleMessage(input: String) : Message(input) {
/**
* Returns just the input string to use as the message text.
*
* @return The [input].
*/
fun getText(): String = input
}
/**
* A `JsonMessage` is created from a json string and converted into an [Array] of [BaseComponent]s.
*
* @see ComponentSerializer.parse
* @see BaseComponent
*/
class JsonMessage(input: String) : Message(input) {
// test for class dependency
init {
Class.forName("net.md_5.bungee.chat.ComponentSerializer")
}
val components: Array<BaseComponent> = ComponentSerializer.parse(input)
}
|
// ... existing code ...
* @see BaseComponent
*/
class JsonMessage(input: String) : Message(input) {
// test for class dependency
init {
Class.forName("net.md_5.bungee.chat.ComponentSerializer")
}
val components: Array<BaseComponent> = ComponentSerializer.parse(input)
}
// ... rest of the code ...
|
9236b68f85372773a53cef42d983d62de5d19b0a
|
src/main/java/com/forgeessentials/afterlife/InventoryGrave.java
|
src/main/java/com/forgeessentials/afterlife/InventoryGrave.java
|
package com.forgeessentials.afterlife;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
public class InventoryGrave extends InventoryBasic {
private Grave grave;
public InventoryGrave(Grave grave)
{
super(grave.owner + "'s grave.", false, grave.getSize());
this.grave = grave;
}
@Override
public void openInventory()
{
for (int i = 0; i < getSizeInventory(); i++)
{
setInventorySlotContents(i, (ItemStack) null);
}
for (int i = 0; i < grave.inv.length; i++)
{
if (grave.inv[i] != null)
{
setInventorySlotContents(i, grave.inv[i].copy());
}
}
super.openInventory();
}
@Override
public void closeInventory()
{
List<ItemStack> list = new ArrayList<ItemStack>();
for (int i = 0; i < getSizeInventory(); i++)
{
ItemStack is = getStackInSlot(i);
if (is != null)
{
list.add(is);
}
}
grave.inv = list.toArray(new ItemStack[list.size()]);
grave.checkGrave();
grave.setOpen(false);
super.closeInventory();
}
}
|
package com.forgeessentials.afterlife;
import com.forgeessentials.util.UserIdent;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
import java.util.ArrayList;
import java.util.List;
public class InventoryGrave extends InventoryBasic {
private Grave grave;
public InventoryGrave(Grave grave)
{
super(new UserIdent(grave.owner).getUsername() + "'s grave.", false, grave.getSize());
this.grave = grave;
}
@Override
public void openInventory()
{
for (int i = 0; i < getSizeInventory(); i++)
{
setInventorySlotContents(i, (ItemStack) null);
}
for (int i = 0; i < grave.inv.length; i++)
{
if (grave.inv[i] != null)
{
setInventorySlotContents(i, grave.inv[i].copy());
}
}
super.openInventory();
}
@Override
public void closeInventory()
{
List<ItemStack> list = new ArrayList<ItemStack>();
for (int i = 0; i < getSizeInventory(); i++)
{
ItemStack is = getStackInSlot(i);
if (is != null)
{
list.add(is);
}
}
grave.inv = list.toArray(new ItemStack[list.size()]);
grave.checkGrave();
grave.setOpen(false);
super.closeInventory();
}
}
|
Fix showing UUIDs instead of player names in the grave inventory
|
Fix showing UUIDs instead of player names in the grave inventory
|
Java
|
epl-1.0
|
ForgeEssentials/ForgeEssentialsMain,aschmois/ForgeEssentialsMain,CityOfLearning/ForgeEssentials,liachmodded/ForgeEssentials,planetguy32/ForgeEssentials,Techjar/ForgeEssentials
|
java
|
## Code Before:
package com.forgeessentials.afterlife;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
public class InventoryGrave extends InventoryBasic {
private Grave grave;
public InventoryGrave(Grave grave)
{
super(grave.owner + "'s grave.", false, grave.getSize());
this.grave = grave;
}
@Override
public void openInventory()
{
for (int i = 0; i < getSizeInventory(); i++)
{
setInventorySlotContents(i, (ItemStack) null);
}
for (int i = 0; i < grave.inv.length; i++)
{
if (grave.inv[i] != null)
{
setInventorySlotContents(i, grave.inv[i].copy());
}
}
super.openInventory();
}
@Override
public void closeInventory()
{
List<ItemStack> list = new ArrayList<ItemStack>();
for (int i = 0; i < getSizeInventory(); i++)
{
ItemStack is = getStackInSlot(i);
if (is != null)
{
list.add(is);
}
}
grave.inv = list.toArray(new ItemStack[list.size()]);
grave.checkGrave();
grave.setOpen(false);
super.closeInventory();
}
}
## Instruction:
Fix showing UUIDs instead of player names in the grave inventory
## Code After:
package com.forgeessentials.afterlife;
import com.forgeessentials.util.UserIdent;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
import java.util.ArrayList;
import java.util.List;
public class InventoryGrave extends InventoryBasic {
private Grave grave;
public InventoryGrave(Grave grave)
{
super(new UserIdent(grave.owner).getUsername() + "'s grave.", false, grave.getSize());
this.grave = grave;
}
@Override
public void openInventory()
{
for (int i = 0; i < getSizeInventory(); i++)
{
setInventorySlotContents(i, (ItemStack) null);
}
for (int i = 0; i < grave.inv.length; i++)
{
if (grave.inv[i] != null)
{
setInventorySlotContents(i, grave.inv[i].copy());
}
}
super.openInventory();
}
@Override
public void closeInventory()
{
List<ItemStack> list = new ArrayList<ItemStack>();
for (int i = 0; i < getSizeInventory(); i++)
{
ItemStack is = getStackInSlot(i);
if (is != null)
{
list.add(is);
}
}
grave.inv = list.toArray(new ItemStack[list.size()]);
grave.checkGrave();
grave.setOpen(false);
super.closeInventory();
}
}
|
# ... existing code ...
package com.forgeessentials.afterlife;
import com.forgeessentials.util.UserIdent;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
import java.util.ArrayList;
import java.util.List;
public class InventoryGrave extends InventoryBasic {
private Grave grave;
# ... modified code ...
public InventoryGrave(Grave grave)
{
super(new UserIdent(grave.owner).getUsername() + "'s grave.", false, grave.getSize());
this.grave = grave;
}
# ... rest of the code ...
|
2d50e06c7e55c19e3055d555d78fac699c61104d
|
tests/integration/test_os_signals.py
|
tests/integration/test_os_signals.py
|
import os
import signal
import diesel
state = {'triggered':False}
def waiter():
diesel.signal(signal.SIGUSR1)
state['triggered'] = True
def test_can_wait_on_os_signals():
# Start our Loop that will wait on USR1
diesel.fork(waiter)
# Let execution switch to the newly spawned loop
diesel.sleep()
# We haven't sent the signal, so the state should not be triggered
assert not state['triggered']
# Send the USR1 signal
os.kill(os.getpid(), signal.SIGUSR1)
# Again, force a switch so the waiter can act on the signal
diesel.sleep()
# Now that we're back, the waiter should have triggered the state
assert state['triggered']
|
import os
import signal
import diesel
from diesel.util.event import Countdown
state = {'triggered':False}
def waiter():
diesel.signal(signal.SIGUSR1)
state['triggered'] = True
def test_can_wait_on_os_signals():
# Start our Loop that will wait on USR1
diesel.fork(waiter)
# Let execution switch to the newly spawned loop
diesel.sleep()
# We haven't sent the signal, so the state should not be triggered
assert not state['triggered']
# Send the USR1 signal
os.kill(os.getpid(), signal.SIGUSR1)
# Again, force a switch so the waiter can act on the signal
diesel.sleep()
# Now that we're back, the waiter should have triggered the state
assert state['triggered']
def test_multiple_signal_waiters():
N_WAITERS = 5
c = Countdown(N_WAITERS)
def mwaiter():
diesel.signal(signal.SIGUSR1)
c.tick()
for i in xrange(N_WAITERS):
diesel.fork(mwaiter)
diesel.sleep()
os.kill(os.getpid(), signal.SIGUSR1)
evt, data = diesel.first(sleep=1, waits=[c])
assert evt is c, "all waiters were not triggered!"
|
Test for multiple waiters on a signal
|
Test for multiple waiters on a signal
|
Python
|
bsd-3-clause
|
dieseldev/diesel
|
python
|
## Code Before:
import os
import signal
import diesel
state = {'triggered':False}
def waiter():
diesel.signal(signal.SIGUSR1)
state['triggered'] = True
def test_can_wait_on_os_signals():
# Start our Loop that will wait on USR1
diesel.fork(waiter)
# Let execution switch to the newly spawned loop
diesel.sleep()
# We haven't sent the signal, so the state should not be triggered
assert not state['triggered']
# Send the USR1 signal
os.kill(os.getpid(), signal.SIGUSR1)
# Again, force a switch so the waiter can act on the signal
diesel.sleep()
# Now that we're back, the waiter should have triggered the state
assert state['triggered']
## Instruction:
Test for multiple waiters on a signal
## Code After:
import os
import signal
import diesel
from diesel.util.event import Countdown
state = {'triggered':False}
def waiter():
diesel.signal(signal.SIGUSR1)
state['triggered'] = True
def test_can_wait_on_os_signals():
# Start our Loop that will wait on USR1
diesel.fork(waiter)
# Let execution switch to the newly spawned loop
diesel.sleep()
# We haven't sent the signal, so the state should not be triggered
assert not state['triggered']
# Send the USR1 signal
os.kill(os.getpid(), signal.SIGUSR1)
# Again, force a switch so the waiter can act on the signal
diesel.sleep()
# Now that we're back, the waiter should have triggered the state
assert state['triggered']
def test_multiple_signal_waiters():
N_WAITERS = 5
c = Countdown(N_WAITERS)
def mwaiter():
diesel.signal(signal.SIGUSR1)
c.tick()
for i in xrange(N_WAITERS):
diesel.fork(mwaiter)
diesel.sleep()
os.kill(os.getpid(), signal.SIGUSR1)
evt, data = diesel.first(sleep=1, waits=[c])
assert evt is c, "all waiters were not triggered!"
|
// ... existing code ...
import signal
import diesel
from diesel.util.event import Countdown
state = {'triggered':False}
// ... modified code ...
# Now that we're back, the waiter should have triggered the state
assert state['triggered']
def test_multiple_signal_waiters():
N_WAITERS = 5
c = Countdown(N_WAITERS)
def mwaiter():
diesel.signal(signal.SIGUSR1)
c.tick()
for i in xrange(N_WAITERS):
diesel.fork(mwaiter)
diesel.sleep()
os.kill(os.getpid(), signal.SIGUSR1)
evt, data = diesel.first(sleep=1, waits=[c])
assert evt is c, "all waiters were not triggered!"
// ... rest of the code ...
|
ccc6c983411f951ef3906d55d6a0946c7ef93c75
|
app/brief_utils.py
|
app/brief_utils.py
|
from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
|
from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting']
# Only check total if all weightings are set
if all(key in brief.data for key in criteria_weighting_keys):
criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys)
if criteria_weightings != 100:
for key in set(criteria_weighting_keys) - set(errs):
errs[key] = 'total_should_be_100'
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
|
Add criteria weighting 100% total validation
|
Add criteria weighting 100% total validation
Checks the criteria weighting sum if all criteria fields are set.
This relies on all three fields being required.
If the fields don't add up to a 100 an error is added for each field
that doesn't have any other validation errors.
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
python
|
## Code Before:
from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
## Instruction:
Add criteria weighting 100% total validation
Checks the criteria weighting sum if all criteria fields are set.
This relies on all three fields being required.
If the fields don't add up to a 100 an error is added for each field
that doesn't have any other validation errors.
## Code After:
from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting']
# Only check total if all weightings are set
if all(key in brief.data for key in criteria_weighting_keys):
criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys)
if criteria_weightings != 100:
for key in set(criteria_weighting_keys) - set(errs):
errs[key] = 'total_should_be_100'
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
|
...
enforce_required=enforce_required,
required_fields=required_fields
)
criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting']
# Only check total if all weightings are set
if all(key in brief.data for key in criteria_weighting_keys):
criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys)
if criteria_weightings != 100:
for key in set(criteria_weighting_keys) - set(errs):
errs[key] = 'total_should_be_100'
if errs:
abort(400, errs)
...
|
2370a6bbdfa740f52807e3cc5bdaeebc5a912684
|
src/net/zephyrizing/http_server/HttpRequest.java
|
src/net/zephyrizing/http_server/HttpRequest.java
|
package net.zephyrizing.http_server;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
public class HttpRequest {
public static enum Method {GET, POST};
private final Method method;
private final Path path;
private final String protocolVersion;
public HttpRequest(Method method, String path, String protocolVersion) {
this.method = method;
this.path = Paths.get(path);
this.protocolVersion = protocolVersion;
}
public Method method() {
return method;
}
public Path path() {
return path;
}
public Path getResolvedPath(Path root) {
Path relativeRequestedPath = Paths.get("/").relativize(this.path);
return root.resolve(relativeRequestedPath);
}
public String protocolVersion() {
return protocolVersion;
}
}
|
package net.zephyrizing.http_server;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
public class HttpRequest {
public static enum Method {OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE};
private final Method method;
private final Path path;
private final String protocolVersion;
public HttpRequest(Method method, String path, String protocolVersion) {
this.method = method;
this.path = Paths.get(path);
this.protocolVersion = protocolVersion;
}
public Method method() {
return method;
}
public Path path() {
return path;
}
public Path getResolvedPath(Path root) {
Path relativeRequestedPath = Paths.get("/").relativize(this.path);
return root.resolve(relativeRequestedPath);
}
public String protocolVersion() {
return protocolVersion;
}
}
|
Add the rest of the HTTP request methods
|
Add the rest of the HTTP request methods
|
Java
|
mit
|
RadicalZephyr/http-server
|
java
|
## Code Before:
package net.zephyrizing.http_server;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
public class HttpRequest {
public static enum Method {GET, POST};
private final Method method;
private final Path path;
private final String protocolVersion;
public HttpRequest(Method method, String path, String protocolVersion) {
this.method = method;
this.path = Paths.get(path);
this.protocolVersion = protocolVersion;
}
public Method method() {
return method;
}
public Path path() {
return path;
}
public Path getResolvedPath(Path root) {
Path relativeRequestedPath = Paths.get("/").relativize(this.path);
return root.resolve(relativeRequestedPath);
}
public String protocolVersion() {
return protocolVersion;
}
}
## Instruction:
Add the rest of the HTTP request methods
## Code After:
package net.zephyrizing.http_server;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
public class HttpRequest {
public static enum Method {OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE};
private final Method method;
private final Path path;
private final String protocolVersion;
public HttpRequest(Method method, String path, String protocolVersion) {
this.method = method;
this.path = Paths.get(path);
this.protocolVersion = protocolVersion;
}
public Method method() {
return method;
}
public Path path() {
return path;
}
public Path getResolvedPath(Path root) {
Path relativeRequestedPath = Paths.get("/").relativize(this.path);
return root.resolve(relativeRequestedPath);
}
public String protocolVersion() {
return protocolVersion;
}
}
|
...
import java.util.List;
public class HttpRequest {
public static enum Method {OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE};
private final Method method;
private final Path path;
...
|
1f16a851da074b56e1fd21b23f6ae68428050c61
|
workflow/drools/domains/src/main/java/org/openengsb/drools/DomainRegistry.java
|
workflow/drools/domains/src/main/java/org/openengsb/drools/DomainRegistry.java
|
/**
Copyright 2010 OpenEngSB Division, Vienna University of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE\-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openengsb.drools;
import java.util.HashMap;
import java.util.Map;
public class DomainRegistry {
public final static Map<String, Class<? extends Domain>> domains = new HashMap<String, Class<? extends Domain>>();
static {
domains.put("helper", MessageHelper.class);
domains.put("issue", DroolsIssuesDomain.class);
domains.put("notification", NotificationDomain.class);
domains.put("scm", ScmDomain.class);
domains.put("test", TestDomain.class);
domains.put("build", BuildDomain.class);
domains.put("deploy", DeployDomain.class);
}
private DomainRegistry() {
throw new AssertionError();
}
}
|
/**
Copyright 2010 OpenEngSB Division, Vienna University of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE\-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openengsb.drools;
import java.util.HashMap;
import java.util.Map;
public class DomainRegistry {
public final static Map<String, Class<? extends Domain>> domains = new HashMap<String, Class<? extends Domain>>();
static {
domains.put("helper", MessageHelper.class);
domains.put("issue", DroolsIssuesDomain.class);
domains.put("notification", NotificationDomain.class);
domains.put("scm", ScmDomain.class);
domains.put("test", TestDomain.class);
domains.put("build", BuildDomain.class);
domains.put("deploy", DeployDomain.class);
domains.put("report", ReportDomain.class);
}
private DomainRegistry() {
throw new AssertionError();
}
}
|
Add report domain to registry.
|
Add report domain to registry.
Signed-off-by: Michael Handler <[email protected]>
|
Java
|
apache-2.0
|
tobster/openengsb,tobster/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,tobster/openengsb,openengsb/openengsb,openengsb/openengsb,tobster/openengsb,openengsb/openengsb
|
java
|
## Code Before:
/**
Copyright 2010 OpenEngSB Division, Vienna University of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE\-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openengsb.drools;
import java.util.HashMap;
import java.util.Map;
public class DomainRegistry {
public final static Map<String, Class<? extends Domain>> domains = new HashMap<String, Class<? extends Domain>>();
static {
domains.put("helper", MessageHelper.class);
domains.put("issue", DroolsIssuesDomain.class);
domains.put("notification", NotificationDomain.class);
domains.put("scm", ScmDomain.class);
domains.put("test", TestDomain.class);
domains.put("build", BuildDomain.class);
domains.put("deploy", DeployDomain.class);
}
private DomainRegistry() {
throw new AssertionError();
}
}
## Instruction:
Add report domain to registry.
Signed-off-by: Michael Handler <[email protected]>
## Code After:
/**
Copyright 2010 OpenEngSB Division, Vienna University of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE\-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openengsb.drools;
import java.util.HashMap;
import java.util.Map;
public class DomainRegistry {
public final static Map<String, Class<? extends Domain>> domains = new HashMap<String, Class<? extends Domain>>();
static {
domains.put("helper", MessageHelper.class);
domains.put("issue", DroolsIssuesDomain.class);
domains.put("notification", NotificationDomain.class);
domains.put("scm", ScmDomain.class);
domains.put("test", TestDomain.class);
domains.put("build", BuildDomain.class);
domains.put("deploy", DeployDomain.class);
domains.put("report", ReportDomain.class);
}
private DomainRegistry() {
throw new AssertionError();
}
}
|
# ... existing code ...
domains.put("test", TestDomain.class);
domains.put("build", BuildDomain.class);
domains.put("deploy", DeployDomain.class);
domains.put("report", ReportDomain.class);
}
private DomainRegistry() {
# ... rest of the code ...
|
17956eb2b8089432ff2a0fcec5ce56884f904db1
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='django-cloudslave',
version='0.2.0',
description='Create, use and destroy temporary slaves in the cloud',
author='Soren Hansen',
author_email='[email protected]',
url='http://github.com/sorenh/python-django-cloudslave',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='django openstack cloud',
install_requires=[
'django',
'python-novaclient'
],
test_suite='tests.main',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development',
]
)
|
from setuptools import setup, find_packages
setup(
name='django-cloudslave',
version='0.2.0',
description='Create, use and destroy temporary slaves in the cloud',
author='Soren Hansen',
author_email='[email protected]',
url='http://github.com/sorenh/python-django-cloudslave',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='django openstack cloud',
install_requires=[
'django',
'python-novaclient',
'south'
],
test_suite='tests.main',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development',
]
)
|
Add south as a dependency.
|
Add south as a dependency.
|
Python
|
apache-2.0
|
sorenh/python-django-cloudslave
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name='django-cloudslave',
version='0.2.0',
description='Create, use and destroy temporary slaves in the cloud',
author='Soren Hansen',
author_email='[email protected]',
url='http://github.com/sorenh/python-django-cloudslave',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='django openstack cloud',
install_requires=[
'django',
'python-novaclient'
],
test_suite='tests.main',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development',
]
)
## Instruction:
Add south as a dependency.
## Code After:
from setuptools import setup, find_packages
setup(
name='django-cloudslave',
version='0.2.0',
description='Create, use and destroy temporary slaves in the cloud',
author='Soren Hansen',
author_email='[email protected]',
url='http://github.com/sorenh/python-django-cloudslave',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='django openstack cloud',
install_requires=[
'django',
'python-novaclient',
'south'
],
test_suite='tests.main',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development',
]
)
|
// ... existing code ...
keywords='django openstack cloud',
install_requires=[
'django',
'python-novaclient',
'south'
],
test_suite='tests.main',
classifiers=[
// ... rest of the code ...
|
21858e2137d3b15089c5d036cd99d4a3be4e3dbe
|
python/sanitytest.py
|
python/sanitytest.py
|
import libvirt
globals = dir(libvirt)
# Sanity test that the generator hasn't gone wrong
# Look for core classes
assert("virConnect" in globals)
assert("virDomain" in globals)
assert("virDomainSnapshot" in globals)
assert("virInterface" in globals)
assert("virNWFilter" in globals)
assert("virNodeDevice" in globals)
assert("virNetwork" in globals)
assert("virSecret" in globals)
assert("virStoragePool" in globals)
assert("virStorageVol" in globals)
assert("virStream" in globals)
assert("VIR_CONNECT_RO" in globals)
# Error related bits
assert("libvirtError" in globals)
assert("VIR_ERR_AUTH_FAILED" in globals)
assert("virGetLastError" in globals)
# Some misc methods
assert("virInitialize" in globals)
assert("virEventAddHandle" in globals)
assert("virEventRegisterDefaultImpl" in globals)
|
import libvirt
globals = dir(libvirt)
# Sanity test that the generator hasn't gone wrong
# Look for core classes
for clsname in ["virConnect",
"virDomain",
"virDomainSnapshot",
"virInterface",
"virNWFilter",
"virNodeDevice",
"virNetwork",
"virSecret",
"virStoragePool",
"virStorageVol",
"virStream",
]:
assert(clsname in globals)
assert(object in getattr(libvirt, clsname).__bases__)
# Constants
assert("VIR_CONNECT_RO" in globals)
# Error related bits
assert("libvirtError" in globals)
assert("VIR_ERR_AUTH_FAILED" in globals)
assert("virGetLastError" in globals)
# Some misc methods
assert("virInitialize" in globals)
assert("virEventAddHandle" in globals)
assert("virEventRegisterDefaultImpl" in globals)
|
Check if classes are derived from object
|
Check if classes are derived from object
This makes sure we don't regress to old style classes
|
Python
|
lgpl-2.1
|
trainstack/libvirt,siboulet/libvirt-openvz,elmarco/libvirt,crobinso/libvirt,eskultety/libvirt,crobinso/libvirt,shugaoye/libvirt,libvirt/libvirt,fabianfreyer/libvirt,iam-TJ/libvirt,eskultety/libvirt,olafhering/libvirt,shugaoye/libvirt,shugaoye/libvirt,rlaager/libvirt,cbosdo/libvirt,rlaager/libvirt,nertpinx/libvirt,andreabolognani/libvirt,taget/libvirt,iam-TJ/libvirt,trainstack/libvirt,iam-TJ/libvirt,cbosdo/libvirt,olafhering/libvirt,olafhering/libvirt,VenkatDatta/libvirt,zhlcindy/libvirt-1.1.4-maintain,agx/libvirt,zippy2/libvirt,cbosdo/libvirt,siboulet/libvirt-openvz,andreabolognani/libvirt,olafhering/libvirt,trainstack/libvirt,agx/libvirt,trainstack/libvirt,datto/libvirt,VenkatDatta/libvirt,zippy2/libvirt,elmarco/libvirt,cbosdo/libvirt,VenkatDatta/libvirt,crobinso/libvirt,VenkatDatta/libvirt,jardasgit/libvirt,eskultety/libvirt,nertpinx/libvirt,datto/libvirt,elmarco/libvirt,fabianfreyer/libvirt,agx/libvirt,fabianfreyer/libvirt,shugaoye/libvirt,libvirt/libvirt,siboulet/libvirt-openvz,elmarco/libvirt,datto/libvirt,fabianfreyer/libvirt,rlaager/libvirt,zippy2/libvirt,taget/libvirt,iam-TJ/libvirt,zippy2/libvirt,andreabolognani/libvirt,fabianfreyer/libvirt,crobinso/libvirt,siboulet/libvirt-openvz,rlaager/libvirt,agx/libvirt,nertpinx/libvirt,andreabolognani/libvirt,jardasgit/libvirt,taget/libvirt,eskultety/libvirt,jfehlig/libvirt,agx/libvirt,zhlcindy/libvirt-1.1.4-maintain,rlaager/libvirt,zhlcindy/libvirt-1.1.4-maintain,libvirt/libvirt,iam-TJ/libvirt,andreabolognani/libvirt,eskultety/libvirt,VenkatDatta/libvirt,jfehlig/libvirt,elmarco/libvirt,jardasgit/libvirt,jfehlig/libvirt,nertpinx/libvirt,iam-TJ/libvirt,zhlcindy/libvirt-1.1.4-maintain,shugaoye/libvirt,jardasgit/libvirt,trainstack/libvirt,iam-TJ/libvirt,libvirt/libvirt,taget/libvirt,taget/libvirt,zhlcindy/libvirt-1.1.4-maintain,trainstack/libvirt,jfehlig/libvirt,nertpinx/libvirt,trainstack/libvirt,datto/libvirt,datto/libvirt,siboulet/libvirt-openvz,jardasgit/libvirt,cbosdo/libvirt
|
python
|
## Code Before:
import libvirt
globals = dir(libvirt)
# Sanity test that the generator hasn't gone wrong
# Look for core classes
assert("virConnect" in globals)
assert("virDomain" in globals)
assert("virDomainSnapshot" in globals)
assert("virInterface" in globals)
assert("virNWFilter" in globals)
assert("virNodeDevice" in globals)
assert("virNetwork" in globals)
assert("virSecret" in globals)
assert("virStoragePool" in globals)
assert("virStorageVol" in globals)
assert("virStream" in globals)
assert("VIR_CONNECT_RO" in globals)
# Error related bits
assert("libvirtError" in globals)
assert("VIR_ERR_AUTH_FAILED" in globals)
assert("virGetLastError" in globals)
# Some misc methods
assert("virInitialize" in globals)
assert("virEventAddHandle" in globals)
assert("virEventRegisterDefaultImpl" in globals)
## Instruction:
Check if classes are derived from object
This makes sure we don't regress to old style classes
## Code After:
import libvirt
globals = dir(libvirt)
# Sanity test that the generator hasn't gone wrong
# Look for core classes
for clsname in ["virConnect",
"virDomain",
"virDomainSnapshot",
"virInterface",
"virNWFilter",
"virNodeDevice",
"virNetwork",
"virSecret",
"virStoragePool",
"virStorageVol",
"virStream",
]:
assert(clsname in globals)
assert(object in getattr(libvirt, clsname).__bases__)
# Constants
assert("VIR_CONNECT_RO" in globals)
# Error related bits
assert("libvirtError" in globals)
assert("VIR_ERR_AUTH_FAILED" in globals)
assert("virGetLastError" in globals)
# Some misc methods
assert("virInitialize" in globals)
assert("virEventAddHandle" in globals)
assert("virEventRegisterDefaultImpl" in globals)
|
...
# Sanity test that the generator hasn't gone wrong
# Look for core classes
for clsname in ["virConnect",
"virDomain",
"virDomainSnapshot",
"virInterface",
"virNWFilter",
"virNodeDevice",
"virNetwork",
"virSecret",
"virStoragePool",
"virStorageVol",
"virStream",
]:
assert(clsname in globals)
assert(object in getattr(libvirt, clsname).__bases__)
# Constants
assert("VIR_CONNECT_RO" in globals)
# Error related bits
...
|
524d5427d54342f26008a5b527140d4158f70edf
|
tests/test_extension.py
|
tests/test_extension.py
|
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
|
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
|
Clear websocket data to try and fix Travis
|
Clear websocket data to try and fix Travis
|
Python
|
agpl-3.0
|
palfrey/mopidy-tachikoma,palfrey/mopidy-tachikoma
|
python
|
## Code Before:
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
## Instruction:
Clear websocket data to try and fix Travis
## Code After:
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
|
// ... existing code ...
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
// ... rest of the code ...
|
54cf69b4c105038f896ceaf8af10c82fd3772bf9
|
pyethapp/tests/test_export.py
|
pyethapp/tests/test_export.py
|
from StringIO import StringIO
import subprocess
from pyethapp.app import app
from click.testing import CliRunner
from ethereum.blocks import BlockHeader
import rlp
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
assert subprocess.call('pyethapp export --from -1 -', shell=True) != 0
assert subprocess.call('pyethapp export --to -3 -', shell=True) != 0
assert subprocess.call('pyethapp export --from 4 --to 2 -', shell=True) != 0
result = subprocess.Popen('pyethapp export --from 2 --to 4 -', shell=True,
stdout=subprocess.PIPE)
result.wait()
assert result.returncode == 0
s = result.stdout.read()
headers = []
end = 0
while end < len(s):
item, end = rlp.codec.consume_item(s, end)
headers.append(BlockHeader.deserialize(item[0]))
assert [header.number for header in headers] == [2, 3, 4]
|
from StringIO import StringIO
import subprocess
from pyethapp.app import app
from click.testing import CliRunner
from ethereum.blocks import BlockHeader
import rlp
import pytest
@pytest.mark.xfail # can not work without mock-up chain
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
assert subprocess.call('pyethapp export --from -1 -', shell=True) != 0
assert subprocess.call('pyethapp export --to -3 -', shell=True) != 0
assert subprocess.call('pyethapp export --from 4 --to 2 -', shell=True) != 0
result = subprocess.Popen('pyethapp export --from 2 --to 4 -', shell=True,
stdout=subprocess.PIPE)
result.wait()
assert result.returncode == 0
s = result.stdout.read()
headers = []
end = 0
while end < len(s):
item, end = rlp.codec.consume_item(s, end)
headers.append(BlockHeader.deserialize(item[0]))
assert [header.number for header in headers] == [2, 3, 4]
|
Mark export test XFAIL since no chain mockup exists
|
Mark export test XFAIL since no chain mockup exists
|
Python
|
mit
|
gsalgado/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp,gsalgado/pyethapp,ethereum/pyethapp,changwu-tw/pyethapp,vaporry/pyethapp,RomanZacharia/pyethapp,d-das/pyethapp
|
python
|
## Code Before:
from StringIO import StringIO
import subprocess
from pyethapp.app import app
from click.testing import CliRunner
from ethereum.blocks import BlockHeader
import rlp
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
assert subprocess.call('pyethapp export --from -1 -', shell=True) != 0
assert subprocess.call('pyethapp export --to -3 -', shell=True) != 0
assert subprocess.call('pyethapp export --from 4 --to 2 -', shell=True) != 0
result = subprocess.Popen('pyethapp export --from 2 --to 4 -', shell=True,
stdout=subprocess.PIPE)
result.wait()
assert result.returncode == 0
s = result.stdout.read()
headers = []
end = 0
while end < len(s):
item, end = rlp.codec.consume_item(s, end)
headers.append(BlockHeader.deserialize(item[0]))
assert [header.number for header in headers] == [2, 3, 4]
## Instruction:
Mark export test XFAIL since no chain mockup exists
## Code After:
from StringIO import StringIO
import subprocess
from pyethapp.app import app
from click.testing import CliRunner
from ethereum.blocks import BlockHeader
import rlp
import pytest
@pytest.mark.xfail # can not work without mock-up chain
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
assert subprocess.call('pyethapp export --from -1 -', shell=True) != 0
assert subprocess.call('pyethapp export --to -3 -', shell=True) != 0
assert subprocess.call('pyethapp export --from 4 --to 2 -', shell=True) != 0
result = subprocess.Popen('pyethapp export --from 2 --to 4 -', shell=True,
stdout=subprocess.PIPE)
result.wait()
assert result.returncode == 0
s = result.stdout.read()
headers = []
end = 0
while end < len(s):
item, end = rlp.codec.consume_item(s, end)
headers.append(BlockHeader.deserialize(item[0]))
assert [header.number for header in headers] == [2, 3, 4]
|
# ... existing code ...
from click.testing import CliRunner
from ethereum.blocks import BlockHeader
import rlp
import pytest
@pytest.mark.xfail # can not work without mock-up chain
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
# ... rest of the code ...
|
a4102e8a4aeaedc8ad9d258d7962f1f661cf6abb
|
src/main/java/com/laytonsmith/abstraction/enums/MCTeleportCause.java
|
src/main/java/com/laytonsmith/abstraction/enums/MCTeleportCause.java
|
package com.laytonsmith.abstraction.enums;
import com.laytonsmith.annotations.MEnum;
@MEnum("TeleportCause")
public enum MCTeleportCause {
COMMAND,
END_PORTAL,
ENDER_PEARL,
NETHER_PORTAL,
PLUGIN,
SPECTATE,
UNKNOWN
}
|
package com.laytonsmith.abstraction.enums;
import com.laytonsmith.annotations.MEnum;
@MEnum("TeleportCause")
public enum MCTeleportCause {
COMMAND,
END_PORTAL,
ENDER_PEARL,
NETHER_PORTAL,
PLUGIN,
SPECTATE,
END_GATEWAY,
UNKNOWN
}
|
Add END_GATEWAY to teleport causes
|
Add END_GATEWAY to teleport causes
|
Java
|
mit
|
sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper
|
java
|
## Code Before:
package com.laytonsmith.abstraction.enums;
import com.laytonsmith.annotations.MEnum;
@MEnum("TeleportCause")
public enum MCTeleportCause {
COMMAND,
END_PORTAL,
ENDER_PEARL,
NETHER_PORTAL,
PLUGIN,
SPECTATE,
UNKNOWN
}
## Instruction:
Add END_GATEWAY to teleport causes
## Code After:
package com.laytonsmith.abstraction.enums;
import com.laytonsmith.annotations.MEnum;
@MEnum("TeleportCause")
public enum MCTeleportCause {
COMMAND,
END_PORTAL,
ENDER_PEARL,
NETHER_PORTAL,
PLUGIN,
SPECTATE,
END_GATEWAY,
UNKNOWN
}
|
// ... existing code ...
NETHER_PORTAL,
PLUGIN,
SPECTATE,
END_GATEWAY,
UNKNOWN
}
// ... rest of the code ...
|
5c35854561d09811e1d16279f551b6ded1a90595
|
src/test/java/org/anarres/jdiagnostics/DefaultQueryTest.java
|
src/test/java/org/anarres/jdiagnostics/DefaultQueryTest.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.anarres.jdiagnostics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;
/**
*
* @author shevek
*/
public class DefaultQueryTest {
private static final Log LOG = LogFactory.getLog(DefaultQueryTest.class);
@Test
public void testQuery() {
DefaultQuery query = new DefaultQuery();
query.add(new ThrowableQuery(new Exception()));
LOG.info(query.call());
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.anarres.jdiagnostics;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author shevek
*/
public class DefaultQueryTest {
private static final Logger LOG = LoggerFactory.getLogger(DefaultQueryTest.class);
@Test
public void testQuery() {
DefaultQuery query = new DefaultQuery();
query.add(new ThrowableQuery(new Exception()));
LOG.info(String.valueOf(query.call()));
}
}
|
Fix tests to work with slf4j.
|
Fix tests to work with slf4j.
|
Java
|
apache-2.0
|
shevek/jdiagnostics,shevek/jdiagnostics
|
java
|
## Code Before:
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.anarres.jdiagnostics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;
/**
*
* @author shevek
*/
public class DefaultQueryTest {
private static final Log LOG = LogFactory.getLog(DefaultQueryTest.class);
@Test
public void testQuery() {
DefaultQuery query = new DefaultQuery();
query.add(new ThrowableQuery(new Exception()));
LOG.info(query.call());
}
}
## Instruction:
Fix tests to work with slf4j.
## Code After:
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.anarres.jdiagnostics;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author shevek
*/
public class DefaultQueryTest {
private static final Logger LOG = LoggerFactory.getLogger(DefaultQueryTest.class);
@Test
public void testQuery() {
DefaultQuery query = new DefaultQuery();
query.add(new ThrowableQuery(new Exception()));
LOG.info(String.valueOf(query.call()));
}
}
|
# ... existing code ...
*/
package org.anarres.jdiagnostics;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
# ... modified code ...
*/
public class DefaultQueryTest {
private static final Logger LOG = LoggerFactory.getLogger(DefaultQueryTest.class);
@Test
public void testQuery() {
DefaultQuery query = new DefaultQuery();
query.add(new ThrowableQuery(new Exception()));
LOG.info(String.valueOf(query.call()));
}
}
# ... rest of the code ...
|
f6f4d5352e9a41135e18e1f55b6c4ef39c662b53
|
src/main/java/Main.java
|
src/main/java/Main.java
|
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.sun.grizzly.http.SelectorThread;
import com.sun.jersey.api.container.grizzly.GrizzlyWebContainerFactory;
public class Main {
public static void main(String[] args) throws IOException {
final String baseUri = "http://localhost:"+(System.getenv("PORT")!=null?System.getenv("PORT"):"9998")+"/";
final Map<String, String> initParams = new HashMap<String, String>();
initParams.put("com.sun.jersey.config.property.packages","resources");
System.out.println("Starting grizzly...");
SelectorThread threadSelector = GrizzlyWebContainerFactory.create(baseUri, initParams);
System.out.println(String.format("Jersey started with WADL available at %sapplication.wadl.",baseUri, baseUri));
}
}
|
import com.sun.jersey.api.container.grizzly.GrizzlyServerFactory;
import com.sun.jersey.api.core.PackagesResourceConfig;
import javax.ws.rs.core.UriBuilder;
import java.io.IOException;
import java.net.URI;
public class Main {
private static final int DEFAULT_PORT = 9998;
public static void main(String[] args) throws IOException {
GrizzlyServerFactory.create(getBaseUri(), getResourceConfig());
}
private static URI getBaseUri() {
final int port = System.getenv("PORT") != null ? Integer.parseInt(System.getenv("PORT")) : DEFAULT_PORT;
return UriBuilder.fromUri("http://localhost/").port(port).build();
}
private static PackagesResourceConfig getResourceConfig() {
return new PackagesResourceConfig("resources");
}
}
|
Simplify server start in main method
|
Simplify server start in main method
|
Java
|
agpl-3.0
|
Lyndir/love-lyndir.server,Lyndir/love-lyndir.server
|
java
|
## Code Before:
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.sun.grizzly.http.SelectorThread;
import com.sun.jersey.api.container.grizzly.GrizzlyWebContainerFactory;
public class Main {
public static void main(String[] args) throws IOException {
final String baseUri = "http://localhost:"+(System.getenv("PORT")!=null?System.getenv("PORT"):"9998")+"/";
final Map<String, String> initParams = new HashMap<String, String>();
initParams.put("com.sun.jersey.config.property.packages","resources");
System.out.println("Starting grizzly...");
SelectorThread threadSelector = GrizzlyWebContainerFactory.create(baseUri, initParams);
System.out.println(String.format("Jersey started with WADL available at %sapplication.wadl.",baseUri, baseUri));
}
}
## Instruction:
Simplify server start in main method
## Code After:
import com.sun.jersey.api.container.grizzly.GrizzlyServerFactory;
import com.sun.jersey.api.core.PackagesResourceConfig;
import javax.ws.rs.core.UriBuilder;
import java.io.IOException;
import java.net.URI;
public class Main {
private static final int DEFAULT_PORT = 9998;
public static void main(String[] args) throws IOException {
GrizzlyServerFactory.create(getBaseUri(), getResourceConfig());
}
private static URI getBaseUri() {
final int port = System.getenv("PORT") != null ? Integer.parseInt(System.getenv("PORT")) : DEFAULT_PORT;
return UriBuilder.fromUri("http://localhost/").port(port).build();
}
private static PackagesResourceConfig getResourceConfig() {
return new PackagesResourceConfig("resources");
}
}
|
// ... existing code ...
import com.sun.jersey.api.container.grizzly.GrizzlyServerFactory;
import com.sun.jersey.api.core.PackagesResourceConfig;
import javax.ws.rs.core.UriBuilder;
import java.io.IOException;
import java.net.URI;
public class Main {
private static final int DEFAULT_PORT = 9998;
public static void main(String[] args) throws IOException {
GrizzlyServerFactory.create(getBaseUri(), getResourceConfig());
}
private static URI getBaseUri() {
final int port = System.getenv("PORT") != null ? Integer.parseInt(System.getenv("PORT")) : DEFAULT_PORT;
return UriBuilder.fromUri("http://localhost/").port(port).build();
}
private static PackagesResourceConfig getResourceConfig() {
return new PackagesResourceConfig("resources");
}
}
// ... rest of the code ...
|
a92f67db0b8fcebef0d302bf3d50f3d05d889bf8
|
WatchMeTest/src/se/chalmers/watchmetest/net/IMDBHandlerTest.java
|
WatchMeTest/src/se/chalmers/watchmetest/net/IMDBHandlerTest.java
|
/**
* IMDBHandlerTest.java
*
* @author Johan Brook
* @copyright (c) 2012 Johan Brook
* @license MIT
*/
package se.chalmers.watchmetest.net;
import org.json.JSONArray;
import org.json.JSONObject;
import se.chalmers.watchme.net.IMDBHandler;
import junit.framework.TestCase;
public class IMDBHandlerTest extends TestCase {
IMDBHandler imdb;
protected void setUp() throws Exception {
super.setUp();
this.imdb = new IMDBHandler();
}
public void testGetMovies() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
assertNotNull(json);
assertTrue(json.length() > 0);
}
public void testGetMovie() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
JSONObject movie = json.optJSONObject(0);
assertNotNull(movie);
}
public void testGetNonExistingMovie() {
JSONArray json = this.imdb.getMoviesByTitle("awdkaowidoawijdwoaijdawoidjaowid");
assertNull(json);
}
}
|
/**
* IMDBHandlerTest.java
*
* @author Johan Brook
* @copyright (c) 2012 Johan Brook
* @license MIT
*/
package se.chalmers.watchmetest.net;
import org.json.JSONArray;
import org.json.JSONObject;
import se.chalmers.watchme.net.IMDBHandler;
import se.chalmers.watchme.utils.MovieHelper;
import junit.framework.TestCase;
public class IMDBHandlerTest extends TestCase {
IMDBHandler imdb;
protected void setUp() throws Exception {
super.setUp();
this.imdb = new IMDBHandler();
}
public void testGetMovies() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
assertNotNull(json);
assertTrue(json.length() > 0);
}
public void testGetMovie() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
JSONObject movie = json.optJSONObject(0);
assertNotNull(movie);
}
public void testGetNonExistingMovie() {
JSONArray json = this.imdb.getMoviesByTitle("awdkaowidoawijdwoaijdawoidjaowid");
assertNull(json);
}
public void testParseStringToJSON() {
String json = "[{key: \"val\"}]";
String json2 = "{key: \"val\"}";
JSONObject res = IMDBHandler.parseStringToJSON(json);
JSONObject res2 = IMDBHandler.parseStringToJSON(json2);
assertNotNull(res);
assertNotNull(res2);
assertNotNull(res.optString("key"));
assertNotNull(res2.optString("key"));
}
}
|
Add test for JSON parse method
|
Add test for JSON parse method
|
Java
|
mit
|
johanbrook/watchme
|
java
|
## Code Before:
/**
* IMDBHandlerTest.java
*
* @author Johan Brook
* @copyright (c) 2012 Johan Brook
* @license MIT
*/
package se.chalmers.watchmetest.net;
import org.json.JSONArray;
import org.json.JSONObject;
import se.chalmers.watchme.net.IMDBHandler;
import junit.framework.TestCase;
public class IMDBHandlerTest extends TestCase {
IMDBHandler imdb;
protected void setUp() throws Exception {
super.setUp();
this.imdb = new IMDBHandler();
}
public void testGetMovies() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
assertNotNull(json);
assertTrue(json.length() > 0);
}
public void testGetMovie() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
JSONObject movie = json.optJSONObject(0);
assertNotNull(movie);
}
public void testGetNonExistingMovie() {
JSONArray json = this.imdb.getMoviesByTitle("awdkaowidoawijdwoaijdawoidjaowid");
assertNull(json);
}
}
## Instruction:
Add test for JSON parse method
## Code After:
/**
* IMDBHandlerTest.java
*
* @author Johan Brook
* @copyright (c) 2012 Johan Brook
* @license MIT
*/
package se.chalmers.watchmetest.net;
import org.json.JSONArray;
import org.json.JSONObject;
import se.chalmers.watchme.net.IMDBHandler;
import se.chalmers.watchme.utils.MovieHelper;
import junit.framework.TestCase;
public class IMDBHandlerTest extends TestCase {
IMDBHandler imdb;
protected void setUp() throws Exception {
super.setUp();
this.imdb = new IMDBHandler();
}
public void testGetMovies() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
assertNotNull(json);
assertTrue(json.length() > 0);
}
public void testGetMovie() {
JSONArray json = this.imdb.getMoviesByTitle("casino royale");
JSONObject movie = json.optJSONObject(0);
assertNotNull(movie);
}
public void testGetNonExistingMovie() {
JSONArray json = this.imdb.getMoviesByTitle("awdkaowidoawijdwoaijdawoidjaowid");
assertNull(json);
}
public void testParseStringToJSON() {
String json = "[{key: \"val\"}]";
String json2 = "{key: \"val\"}";
JSONObject res = IMDBHandler.parseStringToJSON(json);
JSONObject res2 = IMDBHandler.parseStringToJSON(json2);
assertNotNull(res);
assertNotNull(res2);
assertNotNull(res.optString("key"));
assertNotNull(res2.optString("key"));
}
}
|
# ... existing code ...
import org.json.JSONObject;
import se.chalmers.watchme.net.IMDBHandler;
import se.chalmers.watchme.utils.MovieHelper;
import junit.framework.TestCase;
public class IMDBHandlerTest extends TestCase {
# ... modified code ...
JSONArray json = this.imdb.getMoviesByTitle("awdkaowidoawijdwoaijdawoidjaowid");
assertNull(json);
}
public void testParseStringToJSON() {
String json = "[{key: \"val\"}]";
String json2 = "{key: \"val\"}";
JSONObject res = IMDBHandler.parseStringToJSON(json);
JSONObject res2 = IMDBHandler.parseStringToJSON(json2);
assertNotNull(res);
assertNotNull(res2);
assertNotNull(res.optString("key"));
assertNotNull(res2.optString("key"));
}
}
# ... rest of the code ...
|
eeeba609afe732b8e95aa535e70d4cdd2ae1aac7
|
tests/unit/test_cufflinks.py
|
tests/unit/test_cufflinks.py
|
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
Remove some cruft from the cufflinks test.
|
Remove some cruft from the cufflinks test.
|
Python
|
mit
|
vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,verdurin/bcbio-nextgen,fw1121/bcbio-nextgen,gifford-lab/bcbio-nextgen,chapmanb/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen,lbeltrame/bcbio-nextgen,verdurin/bcbio-nextgen,SciLifeLab/bcbio-nextgen,chapmanb/bcbio-nextgen,lpantano/bcbio-nextgen,vladsaveliev/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,fw1121/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,SciLifeLab/bcbio-nextgen,mjafin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,chapmanb/bcbio-nextgen,gifford-lab/bcbio-nextgen,lpantano/bcbio-nextgen,lpantano/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,gifford-lab/bcbio-nextgen,fw1121/bcbio-nextgen,vladsaveliev/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,SciLifeLab/bcbio-nextgen,brainstorm/bcbio-nextgen,biocyberman/bcbio-nextgen,hjanime/bcbio-nextgen,a113n/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen
|
python
|
## Code Before:
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
## Instruction:
Remove some cruft from the cufflinks test.
## Code After:
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
# ... existing code ...
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
# ... rest of the code ...
|
221413b5715286bb7b61e18f8e678f2ca097a5e1
|
rover.py
|
rover.py
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
def set_position(self, x, y, direction):
self.x = x
self.y = y
self.direction = direction
|
Add set_position method to Rover
|
Add set_position method to Rover
|
Python
|
mit
|
authentik8/rover
|
python
|
## Code Before:
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
## Instruction:
Add set_position method to Rover
## Code After:
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
def set_position(self, x, y, direction):
self.x = x
self.y = y
self.direction = direction
|
// ... existing code ...
@property
def position(self):
return self.x, self.y, self.direction
def set_position(self, x, y, direction):
self.x = x
self.y = y
self.direction = direction
// ... rest of the code ...
|
519aff5c44c6801c44981b059654e598c6d8db49
|
second/blog/models.py
|
second/blog/models.py
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
Create Post model in model.py
|
Create Post model in model.py
|
Python
|
mit
|
ugaliguy/Django-Tutorial-Projects,ugaliguy/Django-Tutorial-Projects
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import models
# Create your models here.
## Instruction:
Create Post model in model.py
## Code After:
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
...
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
...
|
6ce92964847c7668654a2caae70b8df936bac8c2
|
src/core/src/main/java/org/geogit/api/plumbing/merge/ConflictsReadOp.java
|
src/core/src/main/java/org/geogit/api/plumbing/merge/ConflictsReadOp.java
|
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.plumbing.merge;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.plumbing.ResolveGeogitDir;
import com.google.common.base.Preconditions;
public class ConflictsReadOp extends AbstractGeoGitOp<List<Conflict>> {
@Override
public List<Conflict> call() {
final URL repoUrl = getCommandLocator().command(ResolveGeogitDir.class).call();
if (repoUrl == null) {
return Collections.emptyList();
} else {
getIndex().getDatabase().open();
return getIndex().getDatabase().getConflicts(null, null);
}
}
}
|
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.plumbing.merge;
import java.net.URL;
import java.util.List;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.plumbing.ResolveGeogitDir;
import com.google.common.collect.ImmutableList;
public class ConflictsReadOp extends AbstractGeoGitOp<List<Conflict>> {
@Override
public List<Conflict> call() {
final URL repoUrl = getCommandLocator().command(ResolveGeogitDir.class).call();
if (repoUrl == null) {
return ImmutableList.of();
} else {
return getIndex().getDatabase().getConflicts(null, null);
}
}
}
|
Remove unneeded call to StagingDatabase.open()
|
Remove unneeded call to StagingDatabase.open()
|
Java
|
bsd-3-clause
|
markles/GeoGit,annacarol/GeoGig,markles/GeoGit,annacarol/GeoGig,boundlessgeo/GeoGig,markles/GeoGit,markles/GeoGit,boundlessgeo/GeoGig,markles/GeoGit,boundlessgeo/GeoGig,annacarol/GeoGig
|
java
|
## Code Before:
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.plumbing.merge;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.plumbing.ResolveGeogitDir;
import com.google.common.base.Preconditions;
public class ConflictsReadOp extends AbstractGeoGitOp<List<Conflict>> {
@Override
public List<Conflict> call() {
final URL repoUrl = getCommandLocator().command(ResolveGeogitDir.class).call();
if (repoUrl == null) {
return Collections.emptyList();
} else {
getIndex().getDatabase().open();
return getIndex().getDatabase().getConflicts(null, null);
}
}
}
## Instruction:
Remove unneeded call to StagingDatabase.open()
## Code After:
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.plumbing.merge;
import java.net.URL;
import java.util.List;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.plumbing.ResolveGeogitDir;
import com.google.common.collect.ImmutableList;
public class ConflictsReadOp extends AbstractGeoGitOp<List<Conflict>> {
@Override
public List<Conflict> call() {
final URL repoUrl = getCommandLocator().command(ResolveGeogitDir.class).call();
if (repoUrl == null) {
return ImmutableList.of();
} else {
return getIndex().getDatabase().getConflicts(null, null);
}
}
}
|
...
import java.net.URL;
import java.util.List;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.plumbing.ResolveGeogitDir;
import com.google.common.collect.ImmutableList;
public class ConflictsReadOp extends AbstractGeoGitOp<List<Conflict>> {
@Override
...
public List<Conflict> call() {
final URL repoUrl = getCommandLocator().command(ResolveGeogitDir.class).call();
if (repoUrl == null) {
return ImmutableList.of();
} else {
return getIndex().getDatabase().getConflicts(null, null);
}
}
...
|
a7830d85c6966732e46da63903c04234d8d16c39
|
admin/nodes/serializers.py
|
admin/nodes/serializers.py
|
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
|
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'registered_date': node.registered_date,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
|
Add date_registered to node serializer
|
Add date_registered to node serializer
[#OSF-7230]
|
Python
|
apache-2.0
|
mattclark/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,mattclark/osf.io,caseyrollins/osf.io,chennan47/osf.io,adlius/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,chennan47/osf.io,hmoco/osf.io,caneruguz/osf.io,mfraezz/osf.io,caneruguz/osf.io,cslzchen/osf.io,sloria/osf.io,caneruguz/osf.io,felliott/osf.io,Nesiehr/osf.io,icereval/osf.io,mattclark/osf.io,binoculars/osf.io,aaxelb/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,icereval/osf.io,sloria/osf.io,chennan47/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,chrisseto/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,erinspace/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,aaxelb/osf.io,binoculars/osf.io,crcresearch/osf.io,felliott/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,baylee-d/osf.io,caneruguz/osf.io,caseyrollins/osf.io,adlius/osf.io,chrisseto/osf.io,binoculars/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,laurenrevere/osf.io,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,hmoco/osf.io,Nesiehr/osf.io,mfraezz/osf.io,crcresearch/osf.io,aaxelb/osf.io,chrisseto/osf.io,crcresearch/osf.io,cwisecarver/osf.io,cslzchen/osf.io,icereval/osf.io,felliott/osf.io,adlius/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,erinspace/osf.io,saradbowman/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chrisseto/osf.io,laurenrevere/osf.io,pattisdr/osf.io
|
python
|
## Code Before:
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
## Instruction:
Add date_registered to node serializer
[#OSF-7230]
## Code After:
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'registered_date': node.registered_date,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
|
# ... existing code ...
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'registered_date': node.registered_date,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
# ... rest of the code ...
|
f66f8b84a4092cce1f9f7e4e29a4fc483c51602f
|
whistleblower/tasks.py
|
whistleblower/tasks.py
|
import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from .targets.facebook_messenger import Post as MessengerPost
from .targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
MessengerPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(4 * HOUR, process_queue.s())
@app.task
def update_suspicions_dataset():
command = ['python', 'rosie/rosie.py', 'run',
'chamber_of_deputies', 'data', '--years=2017,2016']
subprocess.run(command, check=True)
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
|
import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from .targets.facebook_messenger import Post as MessengerPost
from .targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
MessengerPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(4 * HOUR, process_queue.s())
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
|
Remove task for updating data
|
Remove task for updating data
Not sure if this is the best method to have it.
Since for now is not needed, droping it from the codebase.
|
Python
|
unlicense
|
datasciencebr/whistleblower
|
python
|
## Code Before:
import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from .targets.facebook_messenger import Post as MessengerPost
from .targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
MessengerPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(4 * HOUR, process_queue.s())
@app.task
def update_suspicions_dataset():
command = ['python', 'rosie/rosie.py', 'run',
'chamber_of_deputies', 'data', '--years=2017,2016']
subprocess.run(command, check=True)
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
## Instruction:
Remove task for updating data
Not sure if this is the best method to have it.
Since for now is not needed, droping it from the codebase.
## Code After:
import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from .targets.facebook_messenger import Post as MessengerPost
from .targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
MessengerPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(4 * HOUR, process_queue.s())
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
|
# ... existing code ...
@app.task
def update_queue():
whistleblower.queue.Queue().update()
# ... rest of the code ...
|
d937e254ce3c806300ac7763e30bd4303661cba6
|
whaler/analysis.py
|
whaler/analysis.py
|
import os
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
# write table as groundstates.out file.
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
|
import os
import numpy as np
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self, outname="groundstates.csv"):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
columns = [] #turn list of rows into list of columns
# write table as groundstates.out file.
writer = IO(outname, self.loc)
headers = np.array(['Structures', 'S', 'T', 'P', 'D', 'Q'])
writer.tabulate_data(columns, headers, 'Structures')
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
|
Set up data tabulation for gs
|
Set up data tabulation for gs
|
Python
|
mit
|
tristanbrown/whaler
|
python
|
## Code Before:
import os
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
# write table as groundstates.out file.
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
## Instruction:
Set up data tabulation for gs
## Code After:
import os
import numpy as np
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self, outname="groundstates.csv"):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
columns = [] #turn list of rows into list of columns
# write table as groundstates.out file.
writer = IO(outname, self.loc)
headers = np.array(['Structures', 'S', 'T', 'P', 'D', 'Q'])
writer.tabulate_data(columns, headers, 'Structures')
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
|
# ... existing code ...
import os
import numpy as np
from whaler.dataprep import IO
class Analysis():
# ... modified code ...
print(self.loc)
print(self.structs)
def groundstates_all(self, outname="groundstates.csv"):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
columns = [] #turn list of rows into list of columns
# write table as groundstates.out file.
writer = IO(outname, self.loc)
headers = np.array(['Structures', 'S', 'T', 'P', 'D', 'Q'])
writer.tabulate_data(columns, headers, 'Structures')
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
# ... rest of the code ...
|
a7089ed884ed2227fc45671eda3fe979e43545b9
|
tests/files/transpiler/base_class.h
|
tests/files/transpiler/base_class.h
|
class BaseAutoFunction {
public:
bool Init(CitrusRobot *robot, std::vector<void *>);
bool Periodic(CitrusRobot *robot, std::vector<void *>);
private:
//none
};
#endif
|
class CitrusRobot;
class BaseAutoFunction {
public:
bool Init(CitrusRobot *robot, std::vector<void *>);
bool Periodic(CitrusRobot *robot, std::vector<void *>);
private:
//none
};
#endif
|
Fix compilation issue in tests by forward-declaring CitrusRobot
|
Fix compilation issue in tests by forward-declaring CitrusRobot
|
C
|
mit
|
WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler
|
c
|
## Code Before:
class BaseAutoFunction {
public:
bool Init(CitrusRobot *robot, std::vector<void *>);
bool Periodic(CitrusRobot *robot, std::vector<void *>);
private:
//none
};
#endif
## Instruction:
Fix compilation issue in tests by forward-declaring CitrusRobot
## Code After:
class CitrusRobot;
class BaseAutoFunction {
public:
bool Init(CitrusRobot *robot, std::vector<void *>);
bool Periodic(CitrusRobot *robot, std::vector<void *>);
private:
//none
};
#endif
|
// ... existing code ...
class CitrusRobot;
class BaseAutoFunction {
public:
// ... rest of the code ...
|
cfd41cfd0bd199672449db88d0502d37131a5c1f
|
test/Sema/struct-decl.c
|
test/Sema/struct-decl.c
|
// RUN: %clang_cc1 -fsyntax-only -verify %s
// PR3459
struct bar {
char n[1];
};
struct foo {
char name[(int)&((struct bar *)0)->n];
char name2[(int)&((struct bar *)0)->n - 1]; //expected-error{{array size is negative}}
};
// PR3430
struct s {
struct st {
int v;
} *ts;
};
struct st;
int foo() {
struct st *f;
return f->v + f[0].v;
}
// PR3642, PR3671
struct pppoe_tag {
short tag_type;
char tag_data[];
};
struct datatag {
struct pppoe_tag hdr; //expected-warning{{field 'hdr' with variable sized type 'struct pppoe_tag' not at the end of a struct or class is a GNU extension}}
char data;
};
// PR4092
struct s0 {
char a; // expected-note {{previous declaration is here}}
char a; // expected-error {{duplicate member 'a'}}
};
struct s0 f0(void) {}
|
// RUN: %clang_cc1 -fsyntax-only -verify %s
// PR3459
struct bar {
char n[1];
};
struct foo {
char name[(int)&((struct bar *)0)->n];
char name2[(int)&((struct bar *)0)->n - 1]; //expected-error{{array size is negative}}
};
// PR3430
struct s {
struct st {
int v;
} *ts;
};
struct st;
int foo() {
struct st *f;
return f->v + f[0].v;
}
// PR3642, PR3671
struct pppoe_tag {
short tag_type;
char tag_data[];
};
struct datatag {
struct pppoe_tag hdr; //expected-warning{{field 'hdr' with variable sized type 'struct pppoe_tag' not at the end of a struct or class is a GNU extension}}
char data;
};
// PR4092
struct s0 {
char a; // expected-note {{previous declaration is here}}
char a; // expected-error {{duplicate member 'a'}}
};
struct s0 f0(void) {}
// <rdar://problem/8177927> - This previously triggered an assertion failure.
struct x0 {
unsigned int x1;
};
|
Add test case for <rdar://problem/8177927> (which triggered an assertion failure in SemaChecking).
|
Add test case for <rdar://problem/8177927> (which triggered an assertion failure in SemaChecking).
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@108159 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -fsyntax-only -verify %s
// PR3459
struct bar {
char n[1];
};
struct foo {
char name[(int)&((struct bar *)0)->n];
char name2[(int)&((struct bar *)0)->n - 1]; //expected-error{{array size is negative}}
};
// PR3430
struct s {
struct st {
int v;
} *ts;
};
struct st;
int foo() {
struct st *f;
return f->v + f[0].v;
}
// PR3642, PR3671
struct pppoe_tag {
short tag_type;
char tag_data[];
};
struct datatag {
struct pppoe_tag hdr; //expected-warning{{field 'hdr' with variable sized type 'struct pppoe_tag' not at the end of a struct or class is a GNU extension}}
char data;
};
// PR4092
struct s0 {
char a; // expected-note {{previous declaration is here}}
char a; // expected-error {{duplicate member 'a'}}
};
struct s0 f0(void) {}
## Instruction:
Add test case for <rdar://problem/8177927> (which triggered an assertion failure in SemaChecking).
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@108159 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -fsyntax-only -verify %s
// PR3459
struct bar {
char n[1];
};
struct foo {
char name[(int)&((struct bar *)0)->n];
char name2[(int)&((struct bar *)0)->n - 1]; //expected-error{{array size is negative}}
};
// PR3430
struct s {
struct st {
int v;
} *ts;
};
struct st;
int foo() {
struct st *f;
return f->v + f[0].v;
}
// PR3642, PR3671
struct pppoe_tag {
short tag_type;
char tag_data[];
};
struct datatag {
struct pppoe_tag hdr; //expected-warning{{field 'hdr' with variable sized type 'struct pppoe_tag' not at the end of a struct or class is a GNU extension}}
char data;
};
// PR4092
struct s0 {
char a; // expected-note {{previous declaration is here}}
char a; // expected-error {{duplicate member 'a'}}
};
struct s0 f0(void) {}
// <rdar://problem/8177927> - This previously triggered an assertion failure.
struct x0 {
unsigned int x1;
};
|
...
};
struct s0 f0(void) {}
// <rdar://problem/8177927> - This previously triggered an assertion failure.
struct x0 {
unsigned int x1;
};
...
|
9dc756e47c54c799b23bde1de7a93d9209e83072
|
catpeds-rest/src/main/java/com/catpeds/Application.java
|
catpeds-rest/src/main/java/com/catpeds/Application.java
|
package com.catpeds;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Main class for the standalone Spring Boot application execution.
*
* @author padriano
*
*/
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
package com.catpeds;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Main class for the standalone Spring Boot application execution.
*
* @author padriano
*
*/
@SpringBootApplication
public class Application {
private Application() { }
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
Fix code smell for non instantiation
|
Fix code smell for non instantiation
|
Java
|
mit
|
padriano/catpeds,padriano/catpeds,padriano/catpeds
|
java
|
## Code Before:
package com.catpeds;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Main class for the standalone Spring Boot application execution.
*
* @author padriano
*
*/
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
## Instruction:
Fix code smell for non instantiation
## Code After:
package com.catpeds;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Main class for the standalone Spring Boot application execution.
*
* @author padriano
*
*/
@SpringBootApplication
public class Application {
private Application() { }
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
# ... existing code ...
@SpringBootApplication
public class Application {
private Application() { }
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
# ... rest of the code ...
|
6b4e9e5f6f537c84a53847e0cdf07edefafb9abe
|
app/src/main/kotlin/com/felipecosta/microservice/app/core/di/DataModule.kt
|
app/src/main/kotlin/com/felipecosta/microservice/app/core/di/DataModule.kt
|
package com.felipecosta.microservice.app.core.di
import com.felipecosta.microservice.app.core.data.StubMovieRepository
import com.felipecosta.microservice.app.core.domain.MoviesRepository
import dagger.Module
import dagger.Provides
import javax.inject.Singleton
@Module
class DataModule {
@Singleton
@Provides
fun provideMoviesRepository(): MoviesRepository = StubMovieRepository()
}
|
package com.felipecosta.microservice.app.core.di
import com.felipecosta.microservice.app.core.data.RedisMoviesRepository
import com.felipecosta.microservice.app.core.domain.MoviesRepository
import dagger.Module
import dagger.Provides
import javax.inject.Singleton
@Module
class DataModule {
companion object {
const val redisUri = "h:p4d9bad74864deada66ebed2e832c6d6bf2de394afef54902351c836ae9850e0e@ec2-54-227-223-104.compute-1.amazonaws.com:60759"
}
@Singleton
@Provides
fun provideMoviesRepository(): MoviesRepository = RedisMoviesRepository(redisUri)
}
|
Replace stub repository by redis implementation
|
Replace stub repository by redis implementation
|
Kotlin
|
mit
|
fcostaa/kotlin-microservice,fcostaa/kotlin-microservice,fcostaa/kotlin-microservice
|
kotlin
|
## Code Before:
package com.felipecosta.microservice.app.core.di
import com.felipecosta.microservice.app.core.data.StubMovieRepository
import com.felipecosta.microservice.app.core.domain.MoviesRepository
import dagger.Module
import dagger.Provides
import javax.inject.Singleton
@Module
class DataModule {
@Singleton
@Provides
fun provideMoviesRepository(): MoviesRepository = StubMovieRepository()
}
## Instruction:
Replace stub repository by redis implementation
## Code After:
package com.felipecosta.microservice.app.core.di
import com.felipecosta.microservice.app.core.data.RedisMoviesRepository
import com.felipecosta.microservice.app.core.domain.MoviesRepository
import dagger.Module
import dagger.Provides
import javax.inject.Singleton
@Module
class DataModule {
companion object {
const val redisUri = "h:p4d9bad74864deada66ebed2e832c6d6bf2de394afef54902351c836ae9850e0e@ec2-54-227-223-104.compute-1.amazonaws.com:60759"
}
@Singleton
@Provides
fun provideMoviesRepository(): MoviesRepository = RedisMoviesRepository(redisUri)
}
|
// ... existing code ...
package com.felipecosta.microservice.app.core.di
import com.felipecosta.microservice.app.core.data.RedisMoviesRepository
import com.felipecosta.microservice.app.core.domain.MoviesRepository
import dagger.Module
import dagger.Provides
// ... modified code ...
@Module
class DataModule {
companion object {
const val redisUri = "h:p4d9bad74864deada66ebed2e832c6d6bf2de394afef54902351c836ae9850e0e@ec2-54-227-223-104.compute-1.amazonaws.com:60759"
}
@Singleton
@Provides
fun provideMoviesRepository(): MoviesRepository = RedisMoviesRepository(redisUri)
}
// ... rest of the code ...
|
69d0cf6cc0d19f1669f56a361447935e375ac05c
|
indico/modules/events/logs/views.py
|
indico/modules/events/logs/views.py
|
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
Include SUIR JS on logs page
|
Include SUIR JS on logs page
This is not pretty, as we don't even use SUIR there, but
indico/utils/redux imports a module that imports SUIR and thus breaks
the logs page if SUIR is not included.
|
Python
|
mit
|
DirkHoffmann/indico,indico/indico,OmeGak/indico,pferreir/indico,ThiefMaster/indico,ThiefMaster/indico,DirkHoffmann/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,indico/indico,OmeGak/indico,mvidalgarcia/indico,indico/indico,pferreir/indico,ThiefMaster/indico,mic4ael/indico,indico/indico,DirkHoffmann/indico,mic4ael/indico,pferreir/indico,OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,OmeGak/indico,mvidalgarcia/indico,mic4ael/indico,mic4ael/indico
|
python
|
## Code Before:
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
## Instruction:
Include SUIR JS on logs page
This is not pretty, as we don't even use SUIR there, but
indico/utils/redux imports a module that imports SUIR and thus breaks
the logs page if SUIR is not included.
## Code After:
from __future__ import unicode_literals
from indico.modules.events.management.views import WPEventManagement
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
|
...
class WPEventLogs(WPEventManagement):
bundles = ('react.js', 'semantic-ui.js', 'module_events.logs.js', 'module_events.logs.css')
template_prefix = 'events/logs/'
sidemenu_option = 'logs'
...
|
8b7df2f297fde16525821a14755c870c290850af
|
salt/thorium/runner.py
|
salt/thorium/runner.py
|
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
Fix local opts from CLI
|
Fix local opts from CLI
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
## Instruction:
Fix local opts from CLI
## Code After:
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
// ... existing code ...
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
// ... rest of the code ...
|
ce2df60e506865768215d05489c5a52cbb6febcb
|
Classes/JLNRGravityImageView.h
|
Classes/JLNRGravityImageView.h
|
//
// JLNRGravityImageView.h
// JLNRGravityImageViewExample
//
// Created by Julian Raschke on 17.02.15.
// Copyright (c) 2015 Julian Raschke. All rights reserved.
//
#import <UIKit/UIKit.h>
IB_DESIGNABLE
@interface JLNRGravityImageView : UIImageView
@property (nonatomic) IBInspectable BOOL alignTop;
@property (nonatomic) IBInspectable BOOL alignBottom;
@property (nonatomic) IBInspectable BOOL alignLeft;
@property (nonatomic) IBInspectable BOOL alignRight;
@end
|
//
// JLNRGravityImageView.h
// JLNRGravityImageViewExample
//
// Created by Julian Raschke on 17.02.15.
// Copyright (c) 2015 Julian Raschke. All rights reserved.
//
#import <UIKit/UIKit.h>
// IB_DESIGNABLE does not work by default with CocoaPods:
// https://github.com/CocoaPods/CocoaPods/issues/2792
#ifndef COCOAPODS
IB_DESIGNABLE
#endif
@interface JLNRGravityImageView : UIImageView
@property (nonatomic) IBInspectable BOOL alignTop;
@property (nonatomic) IBInspectable BOOL alignBottom;
@property (nonatomic) IBInspectable BOOL alignLeft;
@property (nonatomic) IBInspectable BOOL alignRight;
@end
|
Disable IB_DESIGNABLE when using CocoaPods
|
Disable IB_DESIGNABLE when using CocoaPods
|
C
|
mit
|
jlnr/JLNRGravityImageView,lurado/LDOAlignedImageView
|
c
|
## Code Before:
//
// JLNRGravityImageView.h
// JLNRGravityImageViewExample
//
// Created by Julian Raschke on 17.02.15.
// Copyright (c) 2015 Julian Raschke. All rights reserved.
//
#import <UIKit/UIKit.h>
IB_DESIGNABLE
@interface JLNRGravityImageView : UIImageView
@property (nonatomic) IBInspectable BOOL alignTop;
@property (nonatomic) IBInspectable BOOL alignBottom;
@property (nonatomic) IBInspectable BOOL alignLeft;
@property (nonatomic) IBInspectable BOOL alignRight;
@end
## Instruction:
Disable IB_DESIGNABLE when using CocoaPods
## Code After:
//
// JLNRGravityImageView.h
// JLNRGravityImageViewExample
//
// Created by Julian Raschke on 17.02.15.
// Copyright (c) 2015 Julian Raschke. All rights reserved.
//
#import <UIKit/UIKit.h>
// IB_DESIGNABLE does not work by default with CocoaPods:
// https://github.com/CocoaPods/CocoaPods/issues/2792
#ifndef COCOAPODS
IB_DESIGNABLE
#endif
@interface JLNRGravityImageView : UIImageView
@property (nonatomic) IBInspectable BOOL alignTop;
@property (nonatomic) IBInspectable BOOL alignBottom;
@property (nonatomic) IBInspectable BOOL alignLeft;
@property (nonatomic) IBInspectable BOOL alignRight;
@end
|
...
#import <UIKit/UIKit.h>
// IB_DESIGNABLE does not work by default with CocoaPods:
// https://github.com/CocoaPods/CocoaPods/issues/2792
#ifndef COCOAPODS
IB_DESIGNABLE
#endif
@interface JLNRGravityImageView : UIImageView
@property (nonatomic) IBInspectable BOOL alignTop;
...
|
c30bd67d4fc1773ce8b0752d8e4a7cc00e2a7ae4
|
app/forms.py
|
app/forms.py
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField
from wtforms.validators import DataRequired
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
nickname = StringField('nickname', validation=[DataRequired()])
about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
|
Define the edit profile form
|
Define the edit profile form
|
Python
|
mit
|
ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme
|
python
|
## Code Before:
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField
from wtforms.validators import DataRequired
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
## Instruction:
Define the edit profile form
## Code After:
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
nickname = StringField('nickname', validation=[DataRequired()])
about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
|
...
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
nickname = StringField('nickname', validation=[DataRequired()])
about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
...
|
2745d1a2e06566cda657ffb740942994448a6c58
|
src/main/java/com/grayben/riskExtractor/htmlScorer/elementScorers/ElementScorer.java
|
src/main/java/com/grayben/riskExtractor/htmlScorer/elementScorers/ElementScorer.java
|
package com.grayben.riskExtractor.htmlScorer.elementScorers;
/**
* Element scorer interface. Provide an implementing class to {@link ScoringAndFlatteningVisitor}.
* <p/>
* This interface contains a single method that takes an {@link org.jsoup.nodes.Element} and returns a score.
* @author beng
*
*/
public interface ElementScorer<T> {
/**
*
* @param input the Element to score
* @return the score of element
*/
public int score(T input);
}
|
package com.grayben.riskExtractor.htmlScorer.elementScorers;
import org.jsoup.nodes.Element;
/**
* Element scorer interface. Provide an implementing class to {@link ScoringAndFlatteningVisitor}.
* <p/>
* This interface contains a single method that takes an {@link org.jsoup.nodes.Element} and returns a score.
* @author beng
*
*/
public interface ElementScorer {
/**
*
* @param input the Element to score
* @return the score of element
*/
public int score(Element input);
}
|
Remove generic type, use com.jsoup.nodes.Element type
|
Remove generic type, use com.jsoup.nodes.Element type
|
Java
|
mit
|
grayben/10K-item-extractor,grayben/10K-item-extractor
|
java
|
## Code Before:
package com.grayben.riskExtractor.htmlScorer.elementScorers;
/**
* Element scorer interface. Provide an implementing class to {@link ScoringAndFlatteningVisitor}.
* <p/>
* This interface contains a single method that takes an {@link org.jsoup.nodes.Element} and returns a score.
* @author beng
*
*/
public interface ElementScorer<T> {
/**
*
* @param input the Element to score
* @return the score of element
*/
public int score(T input);
}
## Instruction:
Remove generic type, use com.jsoup.nodes.Element type
## Code After:
package com.grayben.riskExtractor.htmlScorer.elementScorers;
import org.jsoup.nodes.Element;
/**
* Element scorer interface. Provide an implementing class to {@link ScoringAndFlatteningVisitor}.
* <p/>
* This interface contains a single method that takes an {@link org.jsoup.nodes.Element} and returns a score.
* @author beng
*
*/
public interface ElementScorer {
/**
*
* @param input the Element to score
* @return the score of element
*/
public int score(Element input);
}
|
...
package com.grayben.riskExtractor.htmlScorer.elementScorers;
import org.jsoup.nodes.Element;
/**
* Element scorer interface. Provide an implementing class to {@link ScoringAndFlatteningVisitor}.
...
* @author beng
*
*/
public interface ElementScorer {
/**
*
...
* @param input the Element to score
* @return the score of element
*/
public int score(Element input);
}
...
|
cc12728d7160a10f0c182c0cccfde0fd15cadb75
|
spicedham/basewrapper.py
|
spicedham/basewrapper.py
|
class BaseWrapper(object):
"""
A base class for backend plugins.
"""
def get_key(self, tag, key, default=None):
"""
Gets the value held by the tag, key composite key. If it doesn't exist,
return default.
"""
raise NotImplementedError()
def get_key_list(self, tag, keys, default=None):
"""
Given a list of key, tag tuples get all values.
If key, tag doesn't exist, return default.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.get_key(tag, key, default) for tag, key in key_tag_pairs]
def set_key_list(self, tag, key_value_tuples):
"""
Given a list of tuples of tag, key, value set them all.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples]
def set_key(self, tag, key, value):
"""
Set the value held by the tag, key composite key.
"""
raise NotImplementedError()
|
class BaseWrapper(object):
"""
A base class for backend plugins.
"""
def reset(self, really):
"""
Resets the training data to a blank slate.
"""
if really:
raise NotImplementedError()
def get_key(self, tag, key, default=None):
"""
Gets the value held by the tag, key composite key. If it doesn't exist,
return default.
"""
raise NotImplementedError()
def get_key_list(self, tag, keys, default=None):
"""
Given a list of key, tag tuples get all values.
If key, tag doesn't exist, return default.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.get_key(tag, key, default) for tag, key in key_tag_pairs]
def set_key_list(self, tag_key_value_tuples):
"""
Given a list of tuples of tag, key, value set them all.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples]
def set_key(self, tag, key, value):
"""
Set the value held by the tag, key composite key.
"""
raise NotImplementedError()
|
Add a reset function stub
|
Add a reset function stub
Also fix a typo.
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
python
|
## Code Before:
class BaseWrapper(object):
"""
A base class for backend plugins.
"""
def get_key(self, tag, key, default=None):
"""
Gets the value held by the tag, key composite key. If it doesn't exist,
return default.
"""
raise NotImplementedError()
def get_key_list(self, tag, keys, default=None):
"""
Given a list of key, tag tuples get all values.
If key, tag doesn't exist, return default.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.get_key(tag, key, default) for tag, key in key_tag_pairs]
def set_key_list(self, tag, key_value_tuples):
"""
Given a list of tuples of tag, key, value set them all.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples]
def set_key(self, tag, key, value):
"""
Set the value held by the tag, key composite key.
"""
raise NotImplementedError()
## Instruction:
Add a reset function stub
Also fix a typo.
## Code After:
class BaseWrapper(object):
"""
A base class for backend plugins.
"""
def reset(self, really):
"""
Resets the training data to a blank slate.
"""
if really:
raise NotImplementedError()
def get_key(self, tag, key, default=None):
"""
Gets the value held by the tag, key composite key. If it doesn't exist,
return default.
"""
raise NotImplementedError()
def get_key_list(self, tag, keys, default=None):
"""
Given a list of key, tag tuples get all values.
If key, tag doesn't exist, return default.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.get_key(tag, key, default) for tag, key in key_tag_pairs]
def set_key_list(self, tag_key_value_tuples):
"""
Given a list of tuples of tag, key, value set them all.
Subclasses can override this to make more efficient queries for bulk
requests.
"""
return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples]
def set_key(self, tag, key, value):
"""
Set the value held by the tag, key composite key.
"""
raise NotImplementedError()
|
# ... existing code ...
"""
A base class for backend plugins.
"""
def reset(self, really):
"""
Resets the training data to a blank slate.
"""
if really:
raise NotImplementedError()
def get_key(self, tag, key, default=None):
"""
# ... modified code ...
return [self.get_key(tag, key, default) for tag, key in key_tag_pairs]
def set_key_list(self, tag_key_value_tuples):
"""
Given a list of tuples of tag, key, value set them all.
Subclasses can override this to make more efficient queries for bulk
# ... rest of the code ...
|
b30b294d1c079e18068f8ddef7acfa2713cc427f
|
hat-trie/src/common.h
|
hat-trie/src/common.h
|
/*
* This file is part of hat-trie.
*
* Copyright (c) 2011 by Daniel C. Jones <[email protected]>
*
*
* Common typedefs, etc.
*
*/
#ifndef HATTRIE_COMMON_H
#define HATTRIE_COMMON_H
typedef unsigned long value_t;
#endif
|
/*
* This file is part of hat-trie.
*
* Copyright (c) 2011 by Daniel C. Jones <[email protected]>
*
*
* Common typedefs, etc.
*
*/
#ifndef HATTRIE_COMMON_H
#define HATTRIE_COMMON_H
#include "pstdint.h"
typedef uintptr_t value_t;
#endif
|
Update hat-trie C library. Fix GH-11.
|
Update hat-trie C library. Fix GH-11.
|
C
|
mit
|
kmike/hat-trie,kmike/hat-trie,kmike/hat-trie,kmike/hat-trie
|
c
|
## Code Before:
/*
* This file is part of hat-trie.
*
* Copyright (c) 2011 by Daniel C. Jones <[email protected]>
*
*
* Common typedefs, etc.
*
*/
#ifndef HATTRIE_COMMON_H
#define HATTRIE_COMMON_H
typedef unsigned long value_t;
#endif
## Instruction:
Update hat-trie C library. Fix GH-11.
## Code After:
/*
* This file is part of hat-trie.
*
* Copyright (c) 2011 by Daniel C. Jones <[email protected]>
*
*
* Common typedefs, etc.
*
*/
#ifndef HATTRIE_COMMON_H
#define HATTRIE_COMMON_H
#include "pstdint.h"
typedef uintptr_t value_t;
#endif
|
...
#ifndef HATTRIE_COMMON_H
#define HATTRIE_COMMON_H
#include "pstdint.h"
typedef uintptr_t value_t;
#endif
...
|
c1785e0713a5af6b849baaa1b314a13ac777f3f5
|
tests/test_str_py3.py
|
tests/test_str_py3.py
|
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
import fastavro
from fastavro.compat import BytesIO
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
|
Test files shouldn't import 'fastavro.compat'. Just import BytesIO
manually.
|
Python
|
mit
|
e-heller/fastavro,e-heller/fastavro
|
python
|
## Code Before:
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
import fastavro
from fastavro.compat import BytesIO
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
## Instruction:
Test files shouldn't import 'fastavro.compat'. Just import BytesIO
manually.
## Code After:
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
|
// ... existing code ...
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
// ... modified code ...
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
// ... rest of the code ...
|
c671f3f04668b6b1fea4bc2836f966975be20289
|
aztec/src/main/kotlin/org/wordpress/aztec/spans/IAztecAttributedSpan.kt
|
aztec/src/main/kotlin/org/wordpress/aztec/spans/IAztecAttributedSpan.kt
|
package org.wordpress.aztec.spans
import android.text.Editable
import org.wordpress.aztec.AztecAttributes
import org.wordpress.aztec.source.InlineCssStyleFormatter
interface IAztecAttributedSpan {
var attributes: AztecAttributes
/**
* Parses and applies the HTML 'style' attribute.
* @param output An [Editable] containing an [IAztecAttributedSpan] for processing.
* @param start The index where the [IAztecAttributedSpan] starts inside the [text]
*/
fun applyInlineStyleAttributes(output: Editable, start: Int, end: Int) {
val attr = this.attributes
if (attr.hasAttribute("style")) {
InlineCssStyleFormatter.applyInlineStyleAttributes(output, attr, start, end)
}
}
}
|
package org.wordpress.aztec.spans
import android.text.Editable
import org.wordpress.aztec.AztecAttributes
import org.wordpress.aztec.source.InlineCssStyleFormatter
interface IAztecAttributedSpan {
var attributes: AztecAttributes
/**
* Parses and applies the HTML 'style' attribute.
* @param output An [Editable] containing an [IAztecAttributedSpan] for processing.
* @param start The index where the [IAztecAttributedSpan] starts inside the [text]
*/
fun applyInlineStyleAttributes(output: Editable, start: Int, end: Int) {
val attr = this.attributes
if (attr.hasAttribute(InlineCssStyleFormatter.STYLE_ATTRIBUTE)) {
InlineCssStyleFormatter.applyInlineStyleAttributes(output, attr, start, end)
}
}
}
|
Use a declared constant instead of "style" literal
|
Use a declared constant instead of "style" literal
|
Kotlin
|
mpl-2.0
|
wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android
|
kotlin
|
## Code Before:
package org.wordpress.aztec.spans
import android.text.Editable
import org.wordpress.aztec.AztecAttributes
import org.wordpress.aztec.source.InlineCssStyleFormatter
interface IAztecAttributedSpan {
var attributes: AztecAttributes
/**
* Parses and applies the HTML 'style' attribute.
* @param output An [Editable] containing an [IAztecAttributedSpan] for processing.
* @param start The index where the [IAztecAttributedSpan] starts inside the [text]
*/
fun applyInlineStyleAttributes(output: Editable, start: Int, end: Int) {
val attr = this.attributes
if (attr.hasAttribute("style")) {
InlineCssStyleFormatter.applyInlineStyleAttributes(output, attr, start, end)
}
}
}
## Instruction:
Use a declared constant instead of "style" literal
## Code After:
package org.wordpress.aztec.spans
import android.text.Editable
import org.wordpress.aztec.AztecAttributes
import org.wordpress.aztec.source.InlineCssStyleFormatter
interface IAztecAttributedSpan {
var attributes: AztecAttributes
/**
* Parses and applies the HTML 'style' attribute.
* @param output An [Editable] containing an [IAztecAttributedSpan] for processing.
* @param start The index where the [IAztecAttributedSpan] starts inside the [text]
*/
fun applyInlineStyleAttributes(output: Editable, start: Int, end: Int) {
val attr = this.attributes
if (attr.hasAttribute(InlineCssStyleFormatter.STYLE_ATTRIBUTE)) {
InlineCssStyleFormatter.applyInlineStyleAttributes(output, attr, start, end)
}
}
}
|
...
*/
fun applyInlineStyleAttributes(output: Editable, start: Int, end: Int) {
val attr = this.attributes
if (attr.hasAttribute(InlineCssStyleFormatter.STYLE_ATTRIBUTE)) {
InlineCssStyleFormatter.applyInlineStyleAttributes(output, attr, start, end)
}
}
...
|
9eea896352e62850494dbb3f894eef0b82afab89
|
needy/projects/boostbuild.py
|
needy/projects/boostbuild.py
|
import os
import subprocess
from .. import project
class BoostBuildProject(project.Project):
@staticmethod
def identifier():
return 'boostbuild'
@staticmethod
def is_valid_project(definition, needy):
if not definition.target.platform.is_host():
return False
if not os.path.isfile('Jamroot'):
return False
if os.path.isfile('b2'):
return True
try:
needy.command_output(['b2', '-v'])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
@staticmethod
def configuration_keys():
return ['b2-args']
def get_build_concurrency_args(self):
concurrency = self.build_concurrency()
if concurrency > 1:
return ['-j', str(concurrency)]
elif concurrency == 0:
return ['-j']
return []
def build(self, output_directory):
b2 = './b2' if os.path.isfile('b2') else 'b2'
b2_args = self.evaluate(self.configuration('b2-args'))
b2_args += self.get_build_concurrency_args()
self.command([b2] + b2_args)
self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
|
import os
import subprocess
from .. import project
class BoostBuildProject(project.Project):
@staticmethod
def identifier():
return 'boostbuild'
@staticmethod
def is_valid_project(definition, needy):
if not definition.target.platform.is_host():
return False
if not os.path.isfile('Jamroot'):
return False
if os.path.isfile('b2'):
return True
try:
needy.command_output(['b2', '-v'])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
@staticmethod
def configuration_keys():
return ['b2-args']
def get_build_concurrency_args(self):
concurrency = self.build_concurrency()
if concurrency > 1:
return ['-j', str(concurrency)]
elif concurrency == 0:
return ['-j']
return []
def build(self, output_directory):
b2 = './b2' if os.path.isfile('b2') else 'b2'
b2_args = self.evaluate(self.configuration('b2-args'))
b2_args += self.get_build_concurrency_args()
if self.configuration('linkage') in ['static']:
b2_args += ['link=static']
elif self.configuration('linkage') in ['dynamic', 'shared']:
b2_args += ['link=shared']
self.command([b2] + b2_args)
self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
|
Add support for linkage in b2 projects
|
Add support for linkage in b2 projects
|
Python
|
mit
|
vmrob/needy,ccbrown/needy,vmrob/needy,bittorrent/needy,bittorrent/needy,ccbrown/needy
|
python
|
## Code Before:
import os
import subprocess
from .. import project
class BoostBuildProject(project.Project):
@staticmethod
def identifier():
return 'boostbuild'
@staticmethod
def is_valid_project(definition, needy):
if not definition.target.platform.is_host():
return False
if not os.path.isfile('Jamroot'):
return False
if os.path.isfile('b2'):
return True
try:
needy.command_output(['b2', '-v'])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
@staticmethod
def configuration_keys():
return ['b2-args']
def get_build_concurrency_args(self):
concurrency = self.build_concurrency()
if concurrency > 1:
return ['-j', str(concurrency)]
elif concurrency == 0:
return ['-j']
return []
def build(self, output_directory):
b2 = './b2' if os.path.isfile('b2') else 'b2'
b2_args = self.evaluate(self.configuration('b2-args'))
b2_args += self.get_build_concurrency_args()
self.command([b2] + b2_args)
self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
## Instruction:
Add support for linkage in b2 projects
## Code After:
import os
import subprocess
from .. import project
class BoostBuildProject(project.Project):
@staticmethod
def identifier():
return 'boostbuild'
@staticmethod
def is_valid_project(definition, needy):
if not definition.target.platform.is_host():
return False
if not os.path.isfile('Jamroot'):
return False
if os.path.isfile('b2'):
return True
try:
needy.command_output(['b2', '-v'])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
@staticmethod
def configuration_keys():
return ['b2-args']
def get_build_concurrency_args(self):
concurrency = self.build_concurrency()
if concurrency > 1:
return ['-j', str(concurrency)]
elif concurrency == 0:
return ['-j']
return []
def build(self, output_directory):
b2 = './b2' if os.path.isfile('b2') else 'b2'
b2_args = self.evaluate(self.configuration('b2-args'))
b2_args += self.get_build_concurrency_args()
if self.configuration('linkage') in ['static']:
b2_args += ['link=static']
elif self.configuration('linkage') in ['dynamic', 'shared']:
b2_args += ['link=shared']
self.command([b2] + b2_args)
self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
|
// ... existing code ...
b2 = './b2' if os.path.isfile('b2') else 'b2'
b2_args = self.evaluate(self.configuration('b2-args'))
b2_args += self.get_build_concurrency_args()
if self.configuration('linkage') in ['static']:
b2_args += ['link=static']
elif self.configuration('linkage') in ['dynamic', 'shared']:
b2_args += ['link=shared']
self.command([b2] + b2_args)
self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
// ... rest of the code ...
|
add6013c8484e56545ed2f11c8c6e042c1384429
|
swf/exceptions.py
|
swf/exceptions.py
|
class PollTimeout(Exception):
pass
class InvalidCredentialsError(Exception):
pass
class ResponseError(Exception):
pass
class DoesNotExistError(Exception):
pass
class AlreadyExistsError(Exception):
pass
class InvalidKeywordArgumentError(Exception):
pass
|
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
Enhance swf errors wrapping via an exception helper
|
Enhance swf errors wrapping via an exception helper
|
Python
|
mit
|
botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
|
python
|
## Code Before:
class PollTimeout(Exception):
pass
class InvalidCredentialsError(Exception):
pass
class ResponseError(Exception):
pass
class DoesNotExistError(Exception):
pass
class AlreadyExistsError(Exception):
pass
class InvalidKeywordArgumentError(Exception):
pass
## Instruction:
Enhance swf errors wrapping via an exception helper
## Code After:
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
// ... existing code ...
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
// ... rest of the code ...
|
a8a56f20dd76f61ec1ea6e99037490922d5cbcb1
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='grammpy',
version='1.1.1',
packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
url='https://github.com/PatrikValkovic/grammpy',
license='GNU General Public License v3.0',
author='Patrik Valkovic',
download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
author_email='[email protected]',
description='Package for representing formal grammars.'
)
|
from distutils.core import setup
setup(
name='grammpy',
version='1.1.1',
packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
url='https://github.com/PatrikValkovic/grammpy',
license='GNU General Public License v3.0',
author='Patrik Valkovic',
download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
author_email='[email protected]',
description='Package for representing formal grammars.'
)
|
FIX missing Rules directory in package
|
FIX missing Rules directory in package
|
Python
|
mit
|
PatrikValkovic/grammpy
|
python
|
## Code Before:
from distutils.core import setup
setup(
name='grammpy',
version='1.1.1',
packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
url='https://github.com/PatrikValkovic/grammpy',
license='GNU General Public License v3.0',
author='Patrik Valkovic',
download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
author_email='[email protected]',
description='Package for representing formal grammars.'
)
## Instruction:
FIX missing Rules directory in package
## Code After:
from distutils.core import setup
setup(
name='grammpy',
version='1.1.1',
packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
url='https://github.com/PatrikValkovic/grammpy',
license='GNU General Public License v3.0',
author='Patrik Valkovic',
download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
author_email='[email protected]',
description='Package for representing formal grammars.'
)
|
# ... existing code ...
setup(
name='grammpy',
version='1.1.1',
packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
url='https://github.com/PatrikValkovic/grammpy',
license='GNU General Public License v3.0',
author='Patrik Valkovic',
# ... rest of the code ...
|
f3d3750986a8710c54c110c43c00fa152dbbd383
|
src/hades/bin/su.py
|
src/hades/bin/su.py
|
import grp
import logging
import os
import pwd
import sys
from hades.common.cli import (
ArgumentParser, parser as common_parser, setup_cli_logging,
)
logger = logging.getLogger(__name__)
def drop_privileges(passwd, group):
if os.geteuid() != 0:
logger.error("Can't drop privileges (EUID != 0)")
return
os.setgid(group.gr_gid)
os.initgroups(passwd.pw_name, group.gr_gid)
os.setuid(passwd.pw_uid)
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument('user')
parser.add_argument('command')
parser.add_argument('arguments', nargs='*')
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
try:
passwd = pwd.getpwnam(args.user)
group = grp.getgrgid(passwd.pw_gid)
except KeyError:
logger.critical("No such user or group")
return os.EX_NOUSER
filename = args.command
try:
drop_privileges(passwd, group)
os.execvp(filename, [filename] + args.arguments)
except (FileNotFoundError, PermissionError):
logger.critical("Could not execute {}".format(filename), file=sys.stderr)
return os.EX_NOINPUT
except OSError:
logger.exception("An OSError occurred")
return os.EX_OSERR
if __name__ == '__main__':
sys.exit(main())
|
import grp
import logging
import os
import pwd
import sys
from hades.common.cli import (
ArgumentParser, parser as common_parser, setup_cli_logging,
)
logger = logging.getLogger(__name__)
def drop_privileges(passwd, group):
os.setgid(group.gr_gid)
os.initgroups(passwd.pw_name, group.gr_gid)
os.setuid(passwd.pw_uid)
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument('user')
parser.add_argument('command')
parser.add_argument('arguments', nargs='*')
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
try:
passwd = pwd.getpwnam(args.user)
group = grp.getgrgid(passwd.pw_gid)
except KeyError:
logger.critical("No such user or group")
return os.EX_NOUSER
filename = args.command
try:
drop_privileges(passwd, group)
except PermissionError:
logging.exception("Can't drop privileges")
return os.EX_NOPERM
try:
os.execvp(filename, [filename] + args.arguments)
except (FileNotFoundError, PermissionError):
logger.critical("Could not execute {}".format(filename), file=sys.stderr)
return os.EX_NOINPUT
except OSError:
logger.exception("An OSError occurred")
return os.EX_OSERR
if __name__ == '__main__':
sys.exit(main())
|
Abort if privileges can't be dropped
|
Abort if privileges can't be dropped
|
Python
|
mit
|
agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades
|
python
|
## Code Before:
import grp
import logging
import os
import pwd
import sys
from hades.common.cli import (
ArgumentParser, parser as common_parser, setup_cli_logging,
)
logger = logging.getLogger(__name__)
def drop_privileges(passwd, group):
if os.geteuid() != 0:
logger.error("Can't drop privileges (EUID != 0)")
return
os.setgid(group.gr_gid)
os.initgroups(passwd.pw_name, group.gr_gid)
os.setuid(passwd.pw_uid)
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument('user')
parser.add_argument('command')
parser.add_argument('arguments', nargs='*')
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
try:
passwd = pwd.getpwnam(args.user)
group = grp.getgrgid(passwd.pw_gid)
except KeyError:
logger.critical("No such user or group")
return os.EX_NOUSER
filename = args.command
try:
drop_privileges(passwd, group)
os.execvp(filename, [filename] + args.arguments)
except (FileNotFoundError, PermissionError):
logger.critical("Could not execute {}".format(filename), file=sys.stderr)
return os.EX_NOINPUT
except OSError:
logger.exception("An OSError occurred")
return os.EX_OSERR
if __name__ == '__main__':
sys.exit(main())
## Instruction:
Abort if privileges can't be dropped
## Code After:
import grp
import logging
import os
import pwd
import sys
from hades.common.cli import (
ArgumentParser, parser as common_parser, setup_cli_logging,
)
logger = logging.getLogger(__name__)
def drop_privileges(passwd, group):
os.setgid(group.gr_gid)
os.initgroups(passwd.pw_name, group.gr_gid)
os.setuid(passwd.pw_uid)
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument('user')
parser.add_argument('command')
parser.add_argument('arguments', nargs='*')
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
try:
passwd = pwd.getpwnam(args.user)
group = grp.getgrgid(passwd.pw_gid)
except KeyError:
logger.critical("No such user or group")
return os.EX_NOUSER
filename = args.command
try:
drop_privileges(passwd, group)
except PermissionError:
logging.exception("Can't drop privileges")
return os.EX_NOPERM
try:
os.execvp(filename, [filename] + args.arguments)
except (FileNotFoundError, PermissionError):
logger.critical("Could not execute {}".format(filename), file=sys.stderr)
return os.EX_NOINPUT
except OSError:
logger.exception("An OSError occurred")
return os.EX_OSERR
if __name__ == '__main__':
sys.exit(main())
|
// ... existing code ...
def drop_privileges(passwd, group):
os.setgid(group.gr_gid)
os.initgroups(passwd.pw_name, group.gr_gid)
os.setuid(passwd.pw_uid)
// ... modified code ...
filename = args.command
try:
drop_privileges(passwd, group)
except PermissionError:
logging.exception("Can't drop privileges")
return os.EX_NOPERM
try:
os.execvp(filename, [filename] + args.arguments)
except (FileNotFoundError, PermissionError):
logger.critical("Could not execute {}".format(filename), file=sys.stderr)
// ... rest of the code ...
|
5abea2d21c62228eb9a7270a1e10f9f7ec4316af
|
source/services/rotten_tomatoes_service.py
|
source/services/rotten_tomatoes_service.py
|
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
|
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
formatted_title = formatted_title.replace(',', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
|
Remove comma for RT search
|
Remove comma for RT search
|
Python
|
mit
|
jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu
|
python
|
## Code Before:
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
## Instruction:
Remove comma for RT search
## Code After:
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
formatted_title = formatted_title.replace(',', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
|
// ... existing code ...
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
formatted_title = formatted_title.replace(',', '')
return formatted_title
// ... rest of the code ...
|
7b9ba8634c0a02cb4c82313d9bef3197640c3187
|
pyqtgraph/graphicsItems/tests/test_PlotDataItem.py
|
pyqtgraph/graphicsItems/tests/test_PlotDataItem.py
|
import numpy as np
import pyqtgraph as pg
pg.mkQApp()
def test_fft():
f = 20.
x = np.linspace(0, 1, 1000)
y = np.sin(2 * np.pi * f * x)
pd = pg.PlotDataItem(x, y)
pd.setFftMode(True)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - f) < 0.03
x = np.linspace(0, 1, 1001)
y = np.sin(2 * np.pi * f * x)
pd.setData(x, y)
x, y = pd.getData()
assert abs(x[np.argmax(y)]- f) < 0.03
pd.setLogMode(True, False)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - np.log10(f)) < 0.01
|
import numpy as np
import pyqtgraph as pg
pg.mkQApp()
def test_fft():
f = 20.
x = np.linspace(0, 1, 1000)
y = np.sin(2 * np.pi * f * x)
pd = pg.PlotDataItem(x, y)
pd.setFftMode(True)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - f) < 0.03
x = np.linspace(0, 1, 1001)
y = np.sin(2 * np.pi * f * x)
pd.setData(x, y)
x, y = pd.getData()
assert abs(x[np.argmax(y)]- f) < 0.03
pd.setLogMode(True, False)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - np.log10(f)) < 0.01
def test_setData():
pdi = pg.PlotDataItem()
#test empty data
pdi.setData([])
#test y data
y = list(np.random.normal(size=100))
pdi.setData(y)
assert len(pdi.xData) == 100
assert len(pdi.yData) == 100
#test x, y data
y += list(np.random.normal(size=50))
x = np.linspace(5, 10, 150)
pdi.setData(x, y)
assert len(pdi.xData) == 150
assert len(pdi.yData) == 150
#test dict of x, y list
y += list(np.random.normal(size=50))
x = list(np.linspace(5, 10, 200))
pdi.setData({'x': x, 'y': y})
assert len(pdi.xData) == 200
assert len(pdi.yData) == 200
|
Add test_setData() for PlotDataItem class
|
Add test_setData() for PlotDataItem class
|
Python
|
mit
|
campagnola/acq4,pbmanis/acq4,meganbkratz/acq4,acq4/acq4,meganbkratz/acq4,acq4/acq4,pbmanis/acq4,acq4/acq4,pbmanis/acq4,meganbkratz/acq4,acq4/acq4,meganbkratz/acq4,campagnola/acq4,campagnola/acq4,pbmanis/acq4,campagnola/acq4
|
python
|
## Code Before:
import numpy as np
import pyqtgraph as pg
pg.mkQApp()
def test_fft():
f = 20.
x = np.linspace(0, 1, 1000)
y = np.sin(2 * np.pi * f * x)
pd = pg.PlotDataItem(x, y)
pd.setFftMode(True)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - f) < 0.03
x = np.linspace(0, 1, 1001)
y = np.sin(2 * np.pi * f * x)
pd.setData(x, y)
x, y = pd.getData()
assert abs(x[np.argmax(y)]- f) < 0.03
pd.setLogMode(True, False)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - np.log10(f)) < 0.01
## Instruction:
Add test_setData() for PlotDataItem class
## Code After:
import numpy as np
import pyqtgraph as pg
pg.mkQApp()
def test_fft():
f = 20.
x = np.linspace(0, 1, 1000)
y = np.sin(2 * np.pi * f * x)
pd = pg.PlotDataItem(x, y)
pd.setFftMode(True)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - f) < 0.03
x = np.linspace(0, 1, 1001)
y = np.sin(2 * np.pi * f * x)
pd.setData(x, y)
x, y = pd.getData()
assert abs(x[np.argmax(y)]- f) < 0.03
pd.setLogMode(True, False)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - np.log10(f)) < 0.01
def test_setData():
pdi = pg.PlotDataItem()
#test empty data
pdi.setData([])
#test y data
y = list(np.random.normal(size=100))
pdi.setData(y)
assert len(pdi.xData) == 100
assert len(pdi.yData) == 100
#test x, y data
y += list(np.random.normal(size=50))
x = np.linspace(5, 10, 150)
pdi.setData(x, y)
assert len(pdi.xData) == 150
assert len(pdi.yData) == 150
#test dict of x, y list
y += list(np.random.normal(size=50))
x = list(np.linspace(5, 10, 200))
pdi.setData({'x': x, 'y': y})
assert len(pdi.xData) == 200
assert len(pdi.yData) == 200
|
...
pd.setLogMode(True, False)
x, y = pd.getData()
assert abs(x[np.argmax(y)] - np.log10(f)) < 0.01
def test_setData():
pdi = pg.PlotDataItem()
#test empty data
pdi.setData([])
#test y data
y = list(np.random.normal(size=100))
pdi.setData(y)
assert len(pdi.xData) == 100
assert len(pdi.yData) == 100
#test x, y data
y += list(np.random.normal(size=50))
x = np.linspace(5, 10, 150)
pdi.setData(x, y)
assert len(pdi.xData) == 150
assert len(pdi.yData) == 150
#test dict of x, y list
y += list(np.random.normal(size=50))
x = list(np.linspace(5, 10, 200))
pdi.setData({'x': x, 'y': y})
assert len(pdi.xData) == 200
assert len(pdi.yData) == 200
...
|
377beee13a8cd0ca23f8f2e37dd2816571721921
|
tests/sources_tests.py
|
tests/sources_tests.py
|
import os
import subprocess
from nose.tools import istest, assert_equal
from whack.sources import PackageSourceFetcher
from whack.tempdir import create_temporary_dir
from whack.files import read_file, write_file
@istest
def can_fetch_package_source_from_source_control():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
_convert_to_git_repo(package_source_dir)
source_fetcher = PackageSourceFetcher([])
repo_uri = "git+file://{0}".format(package_source_dir)
with source_fetcher.fetch(repo_uri) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
def _convert_to_git_repo(cwd):
def _git(command):
subprocess.check_call(["git"] + command, cwd=cwd)
_git(["init"])
_git(["add", "."])
_git(["commit", "-m", "Initial commit"])
|
import os
import subprocess
from nose.tools import istest, assert_equal
from whack.sources import PackageSourceFetcher
from whack.tempdir import create_temporary_dir
from whack.files import read_file, write_file
@istest
def can_fetch_package_source_from_source_control():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
_convert_to_git_repo(package_source_dir)
source_fetcher = PackageSourceFetcher([])
repo_uri = "git+file://{0}".format(package_source_dir)
with source_fetcher.fetch(repo_uri) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
@istest
def can_fetch_package_source_from_local_path():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
source_fetcher = PackageSourceFetcher([])
with source_fetcher.fetch(package_source_dir) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
def _convert_to_git_repo(cwd):
def _git(command):
subprocess.check_call(["git"] + command, cwd=cwd)
_git(["init"])
_git(["add", "."])
_git(["commit", "-m", "Initial commit"])
|
Add test for fetching local package sources
|
Add test for fetching local package sources
|
Python
|
bsd-2-clause
|
mwilliamson/whack
|
python
|
## Code Before:
import os
import subprocess
from nose.tools import istest, assert_equal
from whack.sources import PackageSourceFetcher
from whack.tempdir import create_temporary_dir
from whack.files import read_file, write_file
@istest
def can_fetch_package_source_from_source_control():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
_convert_to_git_repo(package_source_dir)
source_fetcher = PackageSourceFetcher([])
repo_uri = "git+file://{0}".format(package_source_dir)
with source_fetcher.fetch(repo_uri) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
def _convert_to_git_repo(cwd):
def _git(command):
subprocess.check_call(["git"] + command, cwd=cwd)
_git(["init"])
_git(["add", "."])
_git(["commit", "-m", "Initial commit"])
## Instruction:
Add test for fetching local package sources
## Code After:
import os
import subprocess
from nose.tools import istest, assert_equal
from whack.sources import PackageSourceFetcher
from whack.tempdir import create_temporary_dir
from whack.files import read_file, write_file
@istest
def can_fetch_package_source_from_source_control():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
_convert_to_git_repo(package_source_dir)
source_fetcher = PackageSourceFetcher([])
repo_uri = "git+file://{0}".format(package_source_dir)
with source_fetcher.fetch(repo_uri) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
@istest
def can_fetch_package_source_from_local_path():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
source_fetcher = PackageSourceFetcher([])
with source_fetcher.fetch(package_source_dir) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
def _convert_to_git_repo(cwd):
def _git(command):
subprocess.check_call(["git"] + command, cwd=cwd)
_git(["init"])
_git(["add", "."])
_git(["commit", "-m", "Initial commit"])
|
...
repo_uri = "git+file://{0}".format(package_source_dir)
with source_fetcher.fetch(repo_uri) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
@istest
def can_fetch_package_source_from_local_path():
with create_temporary_dir() as package_source_dir:
write_file(os.path.join(package_source_dir, "name"), "Bob")
source_fetcher = PackageSourceFetcher([])
with source_fetcher.fetch(package_source_dir) as package_source:
assert_equal("Bob", read_file(os.path.join(package_source.path, "name")))
...
|
e2ee9045c59e3f03c5342ee41d23e4adece43535
|
weather/admin.py
|
weather/admin.py
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
pass
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
Define URL for OpenLayers.js to DPaW CDN.
|
Define URL for OpenLayers.js to DPaW CDN.
|
Python
|
bsd-3-clause
|
parksandwildlife/resource_tracking,parksandwildlife/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,parksandwildlife/resource_tracking
|
python
|
## Code Before:
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
pass
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
## Instruction:
Define URL for OpenLayers.js to DPaW CDN.
## Code After:
from django.contrib.admin import ModelAdmin, register
from django.contrib.gis.admin import GeoModelAdmin
from weather.models import WeatherStation, Location
@register(Location)
class LocationAdmin(GeoModelAdmin):
openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
class WeatherStationAdmin(ModelAdmin):
list_display = (
'name', 'abbreviation', 'ip_address', 'last_reading',
'battery_voltage', 'connect_every', 'active')
|
...
@register(Location)
class LocationAdmin(GeoModelAdmin):
openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js'
@register(WeatherStation)
...
|
c395f95f678080cd78c56c27fadc8b37d6766eaa
|
k4kotlin-core/src/main/java/com/livinglifetechway/k4kotlin/core/Keyboard.kt
|
k4kotlin-core/src/main/java/com/livinglifetechway/k4kotlin/core/Keyboard.kt
|
package com.livinglifetechway.k4kotlin.core
import android.app.Activity
import android.content.Context
import android.view.View
import android.view.inputmethod.InputMethodManager
/**
* Hides the soft keyboard
* @receiver Activity
* @return a boolean value if the action was performed or not
*/
fun Activity.hideKeyboard(): Boolean {
val view = currentFocus
view?.let {
val inputMethodManager = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
return inputMethodManager.hideSoftInputFromWindow(view.windowToken, InputMethodManager.HIDE_NOT_ALWAYS)
}
return false
}
/**
* Opens up the keyboard by focusing on the view
* @receiver View
*/
fun View.showKeyboard() {
val imm = this.context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.showSoftInput(this, InputMethodManager.SHOW_IMPLICIT)
}
/**
* Opens up the keyboard forcefully
* @receiver Context
*/
fun Context.showKeyboard() {
val imm = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.toggleSoftInput(InputMethodManager.SHOW_IMPLICIT, 0)
}
|
package com.livinglifetechway.k4kotlin.core
import android.app.Activity
import android.content.Context
import android.view.View
import android.view.inputmethod.InputMethodManager
/**
* Hides the soft keyboard
* @receiver Activity
* @return a boolean value if the action was performed or not
*/
fun Activity.hideKeyboard(): Boolean {
val view = currentFocus
view?.let {
val inputMethodManager = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
return inputMethodManager.hideSoftInputFromWindow(view.windowToken, InputMethodManager.HIDE_NOT_ALWAYS)
}
return false
}
/**
* Opens up the keyboard by focusing on the view
* @receiver View
*/
fun View.showKeyboard() {
val imm = this.context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
this.requestFocus()
imm.showSoftInput(this, InputMethodManager.SHOW_IMPLICIT)
}
/**
* Opens up the keyboard forcefully
* @receiver Context
*/
fun Context.showKeyboard() {
val imm = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.toggleSoftInput(InputMethodManager.SHOW_IMPLICIT, 0)
}
|
Fix show keyboard method for the view to requestFocus first
|
Fix show keyboard method for the view to requestFocus first
|
Kotlin
|
apache-2.0
|
kirtan403/K4Kotlin,kirtan403/K4Kotlin
|
kotlin
|
## Code Before:
package com.livinglifetechway.k4kotlin.core
import android.app.Activity
import android.content.Context
import android.view.View
import android.view.inputmethod.InputMethodManager
/**
* Hides the soft keyboard
* @receiver Activity
* @return a boolean value if the action was performed or not
*/
fun Activity.hideKeyboard(): Boolean {
val view = currentFocus
view?.let {
val inputMethodManager = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
return inputMethodManager.hideSoftInputFromWindow(view.windowToken, InputMethodManager.HIDE_NOT_ALWAYS)
}
return false
}
/**
* Opens up the keyboard by focusing on the view
* @receiver View
*/
fun View.showKeyboard() {
val imm = this.context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.showSoftInput(this, InputMethodManager.SHOW_IMPLICIT)
}
/**
* Opens up the keyboard forcefully
* @receiver Context
*/
fun Context.showKeyboard() {
val imm = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.toggleSoftInput(InputMethodManager.SHOW_IMPLICIT, 0)
}
## Instruction:
Fix show keyboard method for the view to requestFocus first
## Code After:
package com.livinglifetechway.k4kotlin.core
import android.app.Activity
import android.content.Context
import android.view.View
import android.view.inputmethod.InputMethodManager
/**
* Hides the soft keyboard
* @receiver Activity
* @return a boolean value if the action was performed or not
*/
fun Activity.hideKeyboard(): Boolean {
val view = currentFocus
view?.let {
val inputMethodManager = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
return inputMethodManager.hideSoftInputFromWindow(view.windowToken, InputMethodManager.HIDE_NOT_ALWAYS)
}
return false
}
/**
* Opens up the keyboard by focusing on the view
* @receiver View
*/
fun View.showKeyboard() {
val imm = this.context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
this.requestFocus()
imm.showSoftInput(this, InputMethodManager.SHOW_IMPLICIT)
}
/**
* Opens up the keyboard forcefully
* @receiver Context
*/
fun Context.showKeyboard() {
val imm = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
imm.toggleSoftInput(InputMethodManager.SHOW_IMPLICIT, 0)
}
|
# ... existing code ...
*/
fun View.showKeyboard() {
val imm = this.context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
this.requestFocus()
imm.showSoftInput(this, InputMethodManager.SHOW_IMPLICIT)
}
# ... rest of the code ...
|
cbafc968343cd2b001bcee354d418c9886fe94b4
|
tests/test_network.py
|
tests/test_network.py
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
Use localhost for network source tests to avoid waiting for DNS.
|
Use localhost for network source tests to avoid waiting for DNS.
|
Python
|
bsd-3-clause
|
openxc/openxc-python,openxc/openxc-python,openxc/openxc-python
|
python
|
## Code Before:
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
## Instruction:
Use localhost for network source tests to avoid waiting for DNS.
## Code After:
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
...
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
...
|
bb01e6a7f8713b84b92a19a79ffcac4277c32aa7
|
src/main/java/com/github/ferstl/depgraph/DepGraphMojo.java
|
src/main/java/com/github/ferstl/depgraph/DepGraphMojo.java
|
package com.github.ferstl.depgraph;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import com.github.ferstl.depgraph.dot.DotBuilder;
@Mojo(
name = "graph",
aggregator = false,
defaultPhase = LifecyclePhase.NONE,
requiresDependencyCollection = ResolutionScope.TEST,
requiresDirectInvocation = true,
threadSafe = true)
public class DepGraphMojo extends AbstractDepGraphMojo {
@Override
protected GraphFactory createGraphFactory(
DependencyGraphBuilder dependencyGraphBuilder, ArtifactFilter artifactFilter) {
DotBuilder dotBuilder = new DotBuilder(NodeRenderers.VERSIONLESS_ID, NodeRenderers.ARTIFACT_ID_LABEL);
return new AggregatingDotGraphFactory(dependencyGraphBuilder, artifactFilter, dotBuilder);
}
}
|
package com.github.ferstl.depgraph;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import com.github.ferstl.depgraph.dot.DotBuilder;
@Mojo(
name = "graph",
aggregator = false,
defaultPhase = LifecyclePhase.NONE,
requiresDependencyCollection = ResolutionScope.TEST,
requiresDirectInvocation = true,
threadSafe = true)
public class DepGraphMojo extends AbstractDepGraphMojo {
@Override
protected GraphFactory createGraphFactory(
DependencyGraphBuilder dependencyGraphBuilder, ArtifactFilter artifactFilter) {
DotBuilder dotBuilder = new DotBuilder(NodeRenderers.VERSIONLESS_ID, NodeRenderers.ARTIFACT_ID_LABEL);
return new SimpleDotGraphFactory(dependencyGraphBuilder, artifactFilter, dotBuilder);
}
}
|
Use the right graph factory
|
Use the right graph factory
|
Java
|
apache-2.0
|
ferstl/depgraph-maven-plugin
|
java
|
## Code Before:
package com.github.ferstl.depgraph;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import com.github.ferstl.depgraph.dot.DotBuilder;
@Mojo(
name = "graph",
aggregator = false,
defaultPhase = LifecyclePhase.NONE,
requiresDependencyCollection = ResolutionScope.TEST,
requiresDirectInvocation = true,
threadSafe = true)
public class DepGraphMojo extends AbstractDepGraphMojo {
@Override
protected GraphFactory createGraphFactory(
DependencyGraphBuilder dependencyGraphBuilder, ArtifactFilter artifactFilter) {
DotBuilder dotBuilder = new DotBuilder(NodeRenderers.VERSIONLESS_ID, NodeRenderers.ARTIFACT_ID_LABEL);
return new AggregatingDotGraphFactory(dependencyGraphBuilder, artifactFilter, dotBuilder);
}
}
## Instruction:
Use the right graph factory
## Code After:
package com.github.ferstl.depgraph;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import com.github.ferstl.depgraph.dot.DotBuilder;
@Mojo(
name = "graph",
aggregator = false,
defaultPhase = LifecyclePhase.NONE,
requiresDependencyCollection = ResolutionScope.TEST,
requiresDirectInvocation = true,
threadSafe = true)
public class DepGraphMojo extends AbstractDepGraphMojo {
@Override
protected GraphFactory createGraphFactory(
DependencyGraphBuilder dependencyGraphBuilder, ArtifactFilter artifactFilter) {
DotBuilder dotBuilder = new DotBuilder(NodeRenderers.VERSIONLESS_ID, NodeRenderers.ARTIFACT_ID_LABEL);
return new SimpleDotGraphFactory(dependencyGraphBuilder, artifactFilter, dotBuilder);
}
}
|
# ... existing code ...
DependencyGraphBuilder dependencyGraphBuilder, ArtifactFilter artifactFilter) {
DotBuilder dotBuilder = new DotBuilder(NodeRenderers.VERSIONLESS_ID, NodeRenderers.ARTIFACT_ID_LABEL);
return new SimpleDotGraphFactory(dependencyGraphBuilder, artifactFilter, dotBuilder);
}
}
# ... rest of the code ...
|
2494cfe6a329f028fa8fb2d095f7da5ec34b36b1
|
src/cal/TrigoOperator.java
|
src/cal/TrigoOperator.java
|
package cal;
public abstract class TrigoOperator extends Operator{
public TrigoOperator(String sign) {
super(sign);
}
}
|
package cal;
/**
*
* @author huiming
*
*/
public abstract class TrigoOperator extends Operator{
public TrigoOperator(String sign) {
super(sign);
}
}
|
Create a new branch for Java Doc
|
[Add] Create a new branch for Java Doc
|
Java
|
apache-2.0
|
shinypichu88/CS3343-Cal
|
java
|
## Code Before:
package cal;
public abstract class TrigoOperator extends Operator{
public TrigoOperator(String sign) {
super(sign);
}
}
## Instruction:
[Add] Create a new branch for Java Doc
## Code After:
package cal;
/**
*
* @author huiming
*
*/
public abstract class TrigoOperator extends Operator{
public TrigoOperator(String sign) {
super(sign);
}
}
|
# ... existing code ...
package cal;
/**
*
* @author huiming
*
*/
public abstract class TrigoOperator extends Operator{
public TrigoOperator(String sign) {
# ... rest of the code ...
|
751bb272f7fb34b2909d9d0a5309b7d3503276af
|
src/storm2013/commands/SpinUp.java
|
src/storm2013/commands/SpinUp.java
|
package storm2013.commands;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import storm2013.Robot;
/**
* Spins up the firing wheel.
* @author evan1026
*/
public class SpinUp extends Command {
private Timer _onTargetTimer = new Timer();
/**
* Creates a new instance blahdy blahdy blah.
*/
public SpinUp(){
requires(Robot.shooter);
}
/**
* {@inheritDoc}
*/
protected void initialize() {
Robot.shooter.getPIDController().setSetpoint(3400);
Robot.shooter.getPIDController().enable();
_onTargetTimer.start();
_onTargetTimer.reset();
}
/**
* {@inheritDoc}
*/
protected void execute() {
}
/**
* {@inheritDoc}
*/
protected boolean isFinished() {
return Robot.shooter.getPIDController().onTarget();
}
/**
* {@inheritDoc}
*/
protected void end() {
}
/**
* {@inheritDoc}
*/
protected void interrupted() {
end();
}
}
|
package storm2013.commands;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import storm2013.Robot;
/**
* Spins up the firing wheel.
* @author evan1026
*/
public class SpinUp extends Command {
private Timer _onTargetTimer = new Timer();
/**
* Creates a new instance blahdy blahdy blah.
*/
public SpinUp(){
requires(Robot.shooter);
}
/**
* {@inheritDoc}
*/
protected void initialize() {
Robot.shooter.getPIDController().setSetpoint(3400);
Robot.shooter.getPIDController().enable();
_onTargetTimer.start();
_onTargetTimer.reset();
}
/**
* {@inheritDoc}
*/
protected void execute() {
if(!Robot.shooter.onTarget()) {
_onTargetTimer.reset();
}
}
/**
* {@inheritDoc}
*/
protected boolean isFinished() {
return _onTargetTimer.get() > 0.3;
}
/**
* {@inheritDoc}
*/
protected void end() {
}
/**
* {@inheritDoc}
*/
protected void interrupted() {
end();
}
}
|
Add timer for spin up finish
|
Add timer for spin up finish
|
Java
|
bsd-3-clause
|
2729StormRobotics/Storm2013
|
java
|
## Code Before:
package storm2013.commands;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import storm2013.Robot;
/**
* Spins up the firing wheel.
* @author evan1026
*/
public class SpinUp extends Command {
private Timer _onTargetTimer = new Timer();
/**
* Creates a new instance blahdy blahdy blah.
*/
public SpinUp(){
requires(Robot.shooter);
}
/**
* {@inheritDoc}
*/
protected void initialize() {
Robot.shooter.getPIDController().setSetpoint(3400);
Robot.shooter.getPIDController().enable();
_onTargetTimer.start();
_onTargetTimer.reset();
}
/**
* {@inheritDoc}
*/
protected void execute() {
}
/**
* {@inheritDoc}
*/
protected boolean isFinished() {
return Robot.shooter.getPIDController().onTarget();
}
/**
* {@inheritDoc}
*/
protected void end() {
}
/**
* {@inheritDoc}
*/
protected void interrupted() {
end();
}
}
## Instruction:
Add timer for spin up finish
## Code After:
package storm2013.commands;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import storm2013.Robot;
/**
* Spins up the firing wheel.
* @author evan1026
*/
public class SpinUp extends Command {
private Timer _onTargetTimer = new Timer();
/**
* Creates a new instance blahdy blahdy blah.
*/
public SpinUp(){
requires(Robot.shooter);
}
/**
* {@inheritDoc}
*/
protected void initialize() {
Robot.shooter.getPIDController().setSetpoint(3400);
Robot.shooter.getPIDController().enable();
_onTargetTimer.start();
_onTargetTimer.reset();
}
/**
* {@inheritDoc}
*/
protected void execute() {
if(!Robot.shooter.onTarget()) {
_onTargetTimer.reset();
}
}
/**
* {@inheritDoc}
*/
protected boolean isFinished() {
return _onTargetTimer.get() > 0.3;
}
/**
* {@inheritDoc}
*/
protected void end() {
}
/**
* {@inheritDoc}
*/
protected void interrupted() {
end();
}
}
|
// ... existing code ...
* {@inheritDoc}
*/
protected void execute() {
if(!Robot.shooter.onTarget()) {
_onTargetTimer.reset();
}
}
/**
// ... modified code ...
* {@inheritDoc}
*/
protected boolean isFinished() {
return _onTargetTimer.get() > 0.3;
}
/**
// ... rest of the code ...
|
c709e2e6af311678aedbc7cc402c4f13a2cd9cc0
|
arl/src/main/kotlin/Main.kt
|
arl/src/main/kotlin/Main.kt
|
import machine.learning.ARL
import java.io.File
import java.util.*
fun main(args : Array<String>) {
// we open the .dat file located in data
val myFile = File("data/EURUSD.dat").inputStream()
val array2 : ArrayList<Double> = arrayListOf()
myFile.bufferedReader().useLines { lines -> lines.forEach {
array2.add( it.split("/")[0].split(" ").last().toDouble())
} }
val time = System.currentTimeMillis()
val arl = ARL(array2.toDoubleArray().slice(0..100000), 5)
arl.trainingLoop()
arl.testLoop(prices=array2.toDoubleArray().slice(50000..60000).toDoubleArray())
// println(arl.toString())
}
|
import machine.learning.ARL
import java.io.File
import java.util.*
fun main(args : Array<String>) {
// we open the .dat file located in data
val myFile = File("data/EURUSD.dat").inputStream()
val array2 : ArrayList<Double> = arrayListOf()
myFile.bufferedReader().useLines { lines -> lines.forEach {
array2.add( it.split("/")[0].split(" ").last().toDouble())
} }
val time = System.currentTimeMillis()
val arl = ARL(array2.toDoubleArray().slice(0..100000), 5)
arl.trainingLoop()
arl.testLoop(prices=array2.toDoubleArray().slice(100000..110000))
println("time = ${(System.currentTimeMillis() - time) / 1000 }")
// println(arl.toString())
}
|
Update the test loop accord to the signature changement
|
Update the test loop accord to the signature changement
|
Kotlin
|
mit
|
mencattini/eMuLATe,mencattini/eMuLATe,mencattini/eMuLATe,mencattini/eMuLATe
|
kotlin
|
## Code Before:
import machine.learning.ARL
import java.io.File
import java.util.*
fun main(args : Array<String>) {
// we open the .dat file located in data
val myFile = File("data/EURUSD.dat").inputStream()
val array2 : ArrayList<Double> = arrayListOf()
myFile.bufferedReader().useLines { lines -> lines.forEach {
array2.add( it.split("/")[0].split(" ").last().toDouble())
} }
val time = System.currentTimeMillis()
val arl = ARL(array2.toDoubleArray().slice(0..100000), 5)
arl.trainingLoop()
arl.testLoop(prices=array2.toDoubleArray().slice(50000..60000).toDoubleArray())
// println(arl.toString())
}
## Instruction:
Update the test loop accord to the signature changement
## Code After:
import machine.learning.ARL
import java.io.File
import java.util.*
fun main(args : Array<String>) {
// we open the .dat file located in data
val myFile = File("data/EURUSD.dat").inputStream()
val array2 : ArrayList<Double> = arrayListOf()
myFile.bufferedReader().useLines { lines -> lines.forEach {
array2.add( it.split("/")[0].split(" ").last().toDouble())
} }
val time = System.currentTimeMillis()
val arl = ARL(array2.toDoubleArray().slice(0..100000), 5)
arl.trainingLoop()
arl.testLoop(prices=array2.toDoubleArray().slice(100000..110000))
println("time = ${(System.currentTimeMillis() - time) / 1000 }")
// println(arl.toString())
}
|
# ... existing code ...
val arl = ARL(array2.toDoubleArray().slice(0..100000), 5)
arl.trainingLoop()
arl.testLoop(prices=array2.toDoubleArray().slice(100000..110000))
println("time = ${(System.currentTimeMillis() - time) / 1000 }")
// println(arl.toString())
}
# ... rest of the code ...
|
08bffa5f6df497f28fe3481fe80b517628b0f1a3
|
tmdb3/cache_engine.py
|
tmdb3/cache_engine.py
|
class Engines( object ):
def __init__(self):
self._engines = {}
def register(self, engine):
self._engines[engine.__name__] = engine
self._engines[engine.name] = engine
def __getitem__(self, key):
return self._engines[key]
Engines = Engines()
class CacheEngineType( type ):
"""
Cache Engine Metaclass that registers new engines against the cache
for named selection and use.
"""
def __init__(mcs, name, bases, attrs):
super(CacheEngineType, mcs).__init__(name, bases, attrs)
if name != 'CacheEngine':
# skip base class
Engines.register(mcs)
class CacheEngine( object ):
__metaclass__ = CacheEngineType
name = 'unspecified'
def __init__(self, parent):
self.parent = parent
def configure(self):
raise RuntimeError
def get(self, key):
raise RuntimeError
def put(self, key, value, lifetime):
raise RuntimeError
def expire(self, key):
raise RuntimeError
|
class Engines( object ):
def __init__(self):
self._engines = {}
def register(self, engine):
self._engines[engine.__name__] = engine
self._engines[engine.name] = engine
def __getitem__(self, key):
return self._engines[key]
def __contains__(self, key):
return self._engines.__contains__(key)
Engines = Engines()
class CacheEngineType( type ):
"""
Cache Engine Metaclass that registers new engines against the cache
for named selection and use.
"""
def __init__(mcs, name, bases, attrs):
super(CacheEngineType, mcs).__init__(name, bases, attrs)
if name != 'CacheEngine':
# skip base class
Engines.register(mcs)
class CacheEngine( object ):
__metaclass__ = CacheEngineType
name = 'unspecified'
def __init__(self, parent):
self.parent = parent
def configure(self):
raise RuntimeError
def get(self, key):
raise RuntimeError
def put(self, key, value, lifetime):
raise RuntimeError
def expire(self, key):
raise RuntimeError
|
Add __contains__ for proper lookup in cache Engines class.
|
Add __contains__ for proper lookup in cache Engines class.
|
Python
|
bsd-3-clause
|
wagnerrp/pytmdb3,naveenvhegde/pytmdb3
|
python
|
## Code Before:
class Engines( object ):
def __init__(self):
self._engines = {}
def register(self, engine):
self._engines[engine.__name__] = engine
self._engines[engine.name] = engine
def __getitem__(self, key):
return self._engines[key]
Engines = Engines()
class CacheEngineType( type ):
"""
Cache Engine Metaclass that registers new engines against the cache
for named selection and use.
"""
def __init__(mcs, name, bases, attrs):
super(CacheEngineType, mcs).__init__(name, bases, attrs)
if name != 'CacheEngine':
# skip base class
Engines.register(mcs)
class CacheEngine( object ):
__metaclass__ = CacheEngineType
name = 'unspecified'
def __init__(self, parent):
self.parent = parent
def configure(self):
raise RuntimeError
def get(self, key):
raise RuntimeError
def put(self, key, value, lifetime):
raise RuntimeError
def expire(self, key):
raise RuntimeError
## Instruction:
Add __contains__ for proper lookup in cache Engines class.
## Code After:
class Engines( object ):
def __init__(self):
self._engines = {}
def register(self, engine):
self._engines[engine.__name__] = engine
self._engines[engine.name] = engine
def __getitem__(self, key):
return self._engines[key]
def __contains__(self, key):
return self._engines.__contains__(key)
Engines = Engines()
class CacheEngineType( type ):
"""
Cache Engine Metaclass that registers new engines against the cache
for named selection and use.
"""
def __init__(mcs, name, bases, attrs):
super(CacheEngineType, mcs).__init__(name, bases, attrs)
if name != 'CacheEngine':
# skip base class
Engines.register(mcs)
class CacheEngine( object ):
__metaclass__ = CacheEngineType
name = 'unspecified'
def __init__(self, parent):
self.parent = parent
def configure(self):
raise RuntimeError
def get(self, key):
raise RuntimeError
def put(self, key, value, lifetime):
raise RuntimeError
def expire(self, key):
raise RuntimeError
|
# ... existing code ...
self._engines[engine.name] = engine
def __getitem__(self, key):
return self._engines[key]
def __contains__(self, key):
return self._engines.__contains__(key)
Engines = Engines()
class CacheEngineType( type ):
# ... rest of the code ...
|
3458a6b6ca730aeb3c84a8a3919ed2d81f43def4
|
src/main/java/tld/testmod/common/animation/ModAnimation.java
|
src/main/java/tld/testmod/common/animation/ModAnimation.java
|
package tld.testmod.common.animation;
import net.minecraft.world.World;
import net.minecraftforge.client.model.animation.Animation;
import tld.testmod.ModLogger;
public enum ModAnimation
{
INSTANCE;
static long timeOffset;
static Object lastWorld;
/**
* Get the global world time for the current tick, in seconds.
*/
public static float getWorldTime(World world)
{
return getWorldTime(world, 0);
}
/**
* Get the global world time for the current tick + partial tick progress, in seconds.
*/
public static float getWorldTime(World world, float tickProgress)
{
if (!world.equals(lastWorld))
{
timeOffset = world.getTotalWorldTime();
lastWorld = world;
}
long diff = world.getTotalWorldTime() - timeOffset;
//ModLogger.info("Animation#getWorldTime: World: %s, time: %d, offset %d, diff: %d", lastWorld, world.getTotalWorldTime(), timeOffset, diff);
return (world.getTotalWorldTime() - timeOffset + tickProgress) / 20;
}
/**
* Get current partialTickTime.
*/
public static float getPartialTickTime()
{
return Animation.getPartialTickTime();
}
}
|
package tld.testmod.common.animation;
import net.minecraft.world.World;
import net.minecraftforge.client.model.animation.Animation;
public enum ModAnimation
{
INSTANCE;
static long timeOffset;
static int lastWorldHashCode;
/**
* Get the global world time for the current tick, in seconds.
*/
public static float getWorldTime(World world)
{
return getWorldTime(world, 0);
}
/**
* Get the global world time for the current tick + partial tick progress, in seconds.
*/
public static float getWorldTime(World world, float tickProgress)
{
int worldHashCode = world.hashCode();
if (worldHashCode != lastWorldHashCode)
{
timeOffset = world.getTotalWorldTime();
lastWorldHashCode = worldHashCode;
}
//long diff = world.getTotalWorldTime() - timeOffset;
//ModLogger.info("Animation#getWorldTime: World: %s, time: %d, offset %d, diff: %d", lastWorldHashCode, world.getTotalWorldTime(), timeOffset, diff);
return (world.getTotalWorldTime() - timeOffset + tickProgress) / 20;
}
/**
* Get current partialTickTime.
*/
public static float getPartialTickTime()
{
return Animation.getPartialTickTime();
}
}
|
Use the world hashcode instead of the world instance.
|
Use the world hashcode instead of the world instance.
|
Java
|
mit
|
Aeronica/TestMod
|
java
|
## Code Before:
package tld.testmod.common.animation;
import net.minecraft.world.World;
import net.minecraftforge.client.model.animation.Animation;
import tld.testmod.ModLogger;
public enum ModAnimation
{
INSTANCE;
static long timeOffset;
static Object lastWorld;
/**
* Get the global world time for the current tick, in seconds.
*/
public static float getWorldTime(World world)
{
return getWorldTime(world, 0);
}
/**
* Get the global world time for the current tick + partial tick progress, in seconds.
*/
public static float getWorldTime(World world, float tickProgress)
{
if (!world.equals(lastWorld))
{
timeOffset = world.getTotalWorldTime();
lastWorld = world;
}
long diff = world.getTotalWorldTime() - timeOffset;
//ModLogger.info("Animation#getWorldTime: World: %s, time: %d, offset %d, diff: %d", lastWorld, world.getTotalWorldTime(), timeOffset, diff);
return (world.getTotalWorldTime() - timeOffset + tickProgress) / 20;
}
/**
* Get current partialTickTime.
*/
public static float getPartialTickTime()
{
return Animation.getPartialTickTime();
}
}
## Instruction:
Use the world hashcode instead of the world instance.
## Code After:
package tld.testmod.common.animation;
import net.minecraft.world.World;
import net.minecraftforge.client.model.animation.Animation;
public enum ModAnimation
{
INSTANCE;
static long timeOffset;
static int lastWorldHashCode;
/**
* Get the global world time for the current tick, in seconds.
*/
public static float getWorldTime(World world)
{
return getWorldTime(world, 0);
}
/**
* Get the global world time for the current tick + partial tick progress, in seconds.
*/
public static float getWorldTime(World world, float tickProgress)
{
int worldHashCode = world.hashCode();
if (worldHashCode != lastWorldHashCode)
{
timeOffset = world.getTotalWorldTime();
lastWorldHashCode = worldHashCode;
}
//long diff = world.getTotalWorldTime() - timeOffset;
//ModLogger.info("Animation#getWorldTime: World: %s, time: %d, offset %d, diff: %d", lastWorldHashCode, world.getTotalWorldTime(), timeOffset, diff);
return (world.getTotalWorldTime() - timeOffset + tickProgress) / 20;
}
/**
* Get current partialTickTime.
*/
public static float getPartialTickTime()
{
return Animation.getPartialTickTime();
}
}
|
// ... existing code ...
import net.minecraft.world.World;
import net.minecraftforge.client.model.animation.Animation;
public enum ModAnimation
{
// ... modified code ...
INSTANCE;
static long timeOffset;
static int lastWorldHashCode;
/**
* Get the global world time for the current tick, in seconds.
...
*/
public static float getWorldTime(World world, float tickProgress)
{
int worldHashCode = world.hashCode();
if (worldHashCode != lastWorldHashCode)
{
timeOffset = world.getTotalWorldTime();
lastWorldHashCode = worldHashCode;
}
//long diff = world.getTotalWorldTime() - timeOffset;
//ModLogger.info("Animation#getWorldTime: World: %s, time: %d, offset %d, diff: %d", lastWorldHashCode, world.getTotalWorldTime(), timeOffset, diff);
return (world.getTotalWorldTime() - timeOffset + tickProgress) / 20;
}
// ... rest of the code ...
|
9808f1933d83102ee7aa1a5f176433740975af88
|
pytest-devpi-server/tests/integration/test_devpi_server.py
|
pytest-devpi-server/tests/integration/test_devpi_server.py
|
import json
NEW_INDEX = {
'result': {
'acl_toxresult_upload': [':ANONYMOUS:'],
'acl_upload': ['testuser'],
'bases': [],
'mirror_whitelist': [],
'projects': [],
'pypi_whitelist': [],
'type': 'stage',
'volatile': True
},
'type': 'indexconfig'
}
def test_server(devpi_server):
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res) == NEW_INDEX
def test_upload(devpi_server):
pkg_dir = devpi_server.workspace / 'pkg'
pkg_dir.mkdir_p()
setup_py = pkg_dir / 'setup.py'
setup_py.write_text("""
from setuptools import setup
setup(name='test-foo',
version='1.2.3')
""")
pkg_dir.chdir()
devpi_server.api('upload')
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res)['result']['projects'] == ['test-foo']
def test_function_index(devpi_server, devpi_function_index):
res = devpi_server.api('getjson', '/{}/test_function_index'.format(devpi_server.user))
assert json.loads(res) == NEW_INDEX
|
import json
NEW_INDEX = {
u"result": {
u"acl_toxresult_upload": [u":ANONYMOUS:"],
u"acl_upload": [u"testuser"],
u"bases": [],
u"mirror_whitelist": [],
u"projects": [],
u"type": u"stage",
u"volatile": True,
},
u"type": u"indexconfig",
}
def test_server(devpi_server):
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res) == NEW_INDEX
def test_upload(devpi_server):
pkg_dir = devpi_server.workspace / 'pkg'
pkg_dir.mkdir_p()
setup_py = pkg_dir / 'setup.py'
setup_py.write_text("""
from setuptools import setup
setup(name='test-foo',
version='1.2.3')
""")
pkg_dir.chdir()
devpi_server.api('upload')
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res)['result']['projects'] == ['test-foo']
def test_function_index(devpi_server, devpi_function_index):
res = devpi_server.api('getjson', '/{}/test_function_index'.format(devpi_server.user))
assert json.loads(res) == NEW_INDEX
|
Update devpi server index lookup result to fix integration test
|
Update devpi server index lookup result to fix integration test
|
Python
|
mit
|
manahl/pytest-plugins,manahl/pytest-plugins
|
python
|
## Code Before:
import json
NEW_INDEX = {
'result': {
'acl_toxresult_upload': [':ANONYMOUS:'],
'acl_upload': ['testuser'],
'bases': [],
'mirror_whitelist': [],
'projects': [],
'pypi_whitelist': [],
'type': 'stage',
'volatile': True
},
'type': 'indexconfig'
}
def test_server(devpi_server):
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res) == NEW_INDEX
def test_upload(devpi_server):
pkg_dir = devpi_server.workspace / 'pkg'
pkg_dir.mkdir_p()
setup_py = pkg_dir / 'setup.py'
setup_py.write_text("""
from setuptools import setup
setup(name='test-foo',
version='1.2.3')
""")
pkg_dir.chdir()
devpi_server.api('upload')
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res)['result']['projects'] == ['test-foo']
def test_function_index(devpi_server, devpi_function_index):
res = devpi_server.api('getjson', '/{}/test_function_index'.format(devpi_server.user))
assert json.loads(res) == NEW_INDEX
## Instruction:
Update devpi server index lookup result to fix integration test
## Code After:
import json
NEW_INDEX = {
u"result": {
u"acl_toxresult_upload": [u":ANONYMOUS:"],
u"acl_upload": [u"testuser"],
u"bases": [],
u"mirror_whitelist": [],
u"projects": [],
u"type": u"stage",
u"volatile": True,
},
u"type": u"indexconfig",
}
def test_server(devpi_server):
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res) == NEW_INDEX
def test_upload(devpi_server):
pkg_dir = devpi_server.workspace / 'pkg'
pkg_dir.mkdir_p()
setup_py = pkg_dir / 'setup.py'
setup_py.write_text("""
from setuptools import setup
setup(name='test-foo',
version='1.2.3')
""")
pkg_dir.chdir()
devpi_server.api('upload')
res = devpi_server.api('getjson', '/{}/{}'.format(devpi_server.user, devpi_server.index))
assert json.loads(res)['result']['projects'] == ['test-foo']
def test_function_index(devpi_server, devpi_function_index):
res = devpi_server.api('getjson', '/{}/test_function_index'.format(devpi_server.user))
assert json.loads(res) == NEW_INDEX
|
// ... existing code ...
import json
NEW_INDEX = {
u"result": {
u"acl_toxresult_upload": [u":ANONYMOUS:"],
u"acl_upload": [u"testuser"],
u"bases": [],
u"mirror_whitelist": [],
u"projects": [],
u"type": u"stage",
u"volatile": True,
},
u"type": u"indexconfig",
}
// ... rest of the code ...
|
7208a32b3dfb6cdd73c509add8378a20b31bb5a7
|
libyaul/scu/bus/cpu/cpu_slave.c
|
libyaul/scu/bus/cpu/cpu_slave.c
|
/*
* Copyright (c) 2012-2016
* See LICENSE for details.
*
* Israel Jacquez <[email protected]
*/
#include <sys/cdefs.h>
#include <smpc/smc.h>
#include <cpu/instructions.h>
#include <cpu/frt.h>
#include <cpu/intc.h>
#include <cpu/map.h>
#include <cpu/slave.h>
static void _slave_entry(void);
static void _default_entry(void);
static void (*_entry)(void) = _default_entry;
void
cpu_slave_init(void)
{
smpc_smc_sshoff_call();
cpu_slave_entry_clear();
cpu_intc_ihr_set(INTC_INTERRUPT_SLAVE_ENTRY, _slave_entry);
smpc_smc_sshon_call();
}
void
cpu_slave_entry_set(void (*entry)(void))
{
_entry = (entry != NULL) ? entry : _default_entry;
}
static void
_slave_entry(void)
{
while (true) {
while (((cpu_frt_status_get()) & FRTCS_ICF) == 0x00);
cpu_frt_control_chg((uint8_t)~FRTCS_ICF);
_entry();
}
}
static void
_default_entry(void)
{
}
|
/*
* Copyright (c) 2012-2016
* See LICENSE for details.
*
* Israel Jacquez <[email protected]
*/
#include <sys/cdefs.h>
#include <smpc/smc.h>
#include <cpu/instructions.h>
#include <cpu/frt.h>
#include <cpu/intc.h>
#include <cpu/map.h>
#include <cpu/slave.h>
static void _slave_entry(void);
static void _default_entry(void);
static void (*_entry)(void) = _default_entry;
void
cpu_slave_init(void)
{
smpc_smc_sshoff_call();
cpu_slave_entry_clear();
cpu_intc_ihr_set(INTC_INTERRUPT_SLAVE_ENTRY, _slave_entry);
smpc_smc_sshon_call();
}
void
cpu_slave_entry_set(void (*entry)(void))
{
_entry = (entry != NULL) ? entry : _default_entry;
}
static void __noreturn
_slave_entry(void)
{
while (true) {
while (((cpu_frt_status_get()) & FRTCS_ICF) == 0x00);
cpu_frt_control_chg((uint8_t)~FRTCS_ICF);
_entry();
}
}
static void
_default_entry(void)
{
}
|
Mark _slave_entry to never return
|
Mark _slave_entry to never return
|
C
|
mit
|
ijacquez/libyaul,ijacquez/libyaul,ijacquez/libyaul,ijacquez/libyaul
|
c
|
## Code Before:
/*
* Copyright (c) 2012-2016
* See LICENSE for details.
*
* Israel Jacquez <[email protected]
*/
#include <sys/cdefs.h>
#include <smpc/smc.h>
#include <cpu/instructions.h>
#include <cpu/frt.h>
#include <cpu/intc.h>
#include <cpu/map.h>
#include <cpu/slave.h>
static void _slave_entry(void);
static void _default_entry(void);
static void (*_entry)(void) = _default_entry;
void
cpu_slave_init(void)
{
smpc_smc_sshoff_call();
cpu_slave_entry_clear();
cpu_intc_ihr_set(INTC_INTERRUPT_SLAVE_ENTRY, _slave_entry);
smpc_smc_sshon_call();
}
void
cpu_slave_entry_set(void (*entry)(void))
{
_entry = (entry != NULL) ? entry : _default_entry;
}
static void
_slave_entry(void)
{
while (true) {
while (((cpu_frt_status_get()) & FRTCS_ICF) == 0x00);
cpu_frt_control_chg((uint8_t)~FRTCS_ICF);
_entry();
}
}
static void
_default_entry(void)
{
}
## Instruction:
Mark _slave_entry to never return
## Code After:
/*
* Copyright (c) 2012-2016
* See LICENSE for details.
*
* Israel Jacquez <[email protected]
*/
#include <sys/cdefs.h>
#include <smpc/smc.h>
#include <cpu/instructions.h>
#include <cpu/frt.h>
#include <cpu/intc.h>
#include <cpu/map.h>
#include <cpu/slave.h>
static void _slave_entry(void);
static void _default_entry(void);
static void (*_entry)(void) = _default_entry;
void
cpu_slave_init(void)
{
smpc_smc_sshoff_call();
cpu_slave_entry_clear();
cpu_intc_ihr_set(INTC_INTERRUPT_SLAVE_ENTRY, _slave_entry);
smpc_smc_sshon_call();
}
void
cpu_slave_entry_set(void (*entry)(void))
{
_entry = (entry != NULL) ? entry : _default_entry;
}
static void __noreturn
_slave_entry(void)
{
while (true) {
while (((cpu_frt_status_get()) & FRTCS_ICF) == 0x00);
cpu_frt_control_chg((uint8_t)~FRTCS_ICF);
_entry();
}
}
static void
_default_entry(void)
{
}
|
// ... existing code ...
#include <cpu/slave.h>
static void _slave_entry(void);
static void _default_entry(void);
static void (*_entry)(void) = _default_entry;
// ... modified code ...
_entry = (entry != NULL) ? entry : _default_entry;
}
static void __noreturn
_slave_entry(void)
{
while (true) {
// ... rest of the code ...
|
703a423f4a0aeda7cbeaa542e2f4e0581eee3bda
|
slot/utils.py
|
slot/utils.py
|
import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])
|
import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])
|
Add timezone support to timestamp helper methods
|
Add timezone support to timestamp helper methods
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
python
|
## Code Before:
import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])
## Instruction:
Add timezone support to timestamp helper methods
## Code After:
import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])
|
...
import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
...
|
4eb4a2eaa42cd71bf4427bdaaa1e853975432691
|
graphene/storage/intermediate/general_store_manager.py
|
graphene/storage/intermediate/general_store_manager.py
|
from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
|
from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
|
Allow keyword arguments in GeneralStoreManager.create_item method
|
Allow keyword arguments in GeneralStoreManager.create_item method
|
Python
|
apache-2.0
|
PHB-CS123/graphene,PHB-CS123/graphene,PHB-CS123/graphene
|
python
|
## Code Before:
from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
## Instruction:
Allow keyword arguments in GeneralStoreManager.create_item method
## Code After:
from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
|
...
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
...
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
...
|
2f4483440a98f34b650ea09a75f6dc941548f8b2
|
zeus/vcs/db.py
|
zeus/vcs/db.py
|
import asyncpg
class Database:
def __init__(self, host: str, port: int, user: str, password: str, database: str):
self.host = host
self.port = port
self.user = user
self.password = password
self.database = database
self._conn = None
async def connect(self):
self._conn = await asyncpg.connect(
host=self.host,
port=self.port,
user=self.user,
password=self.password,
database=self.database,
)
return self._conn
async def close(self):
if self._conn:
await self._conn.close()
self._conn = None
async def fetch(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.fetch(*args, **kwargs)
async def execute(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.execute(*args, **kwargs)
async def transaction(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return conn.transaction(*args, **kwargs)
|
import asyncpg
class Database:
def __init__(self, host: str, port: int, user: str, password: str, database: str):
self.host = host
self.port = port
self.user = user
self.password = password
self.database = database
self._conn = None
async def connect(self):
self._conn = await asyncpg.connect(
host=self.host,
port=self.port,
user=self.user,
password=self.password,
database=self.database,
# https://github.com/MagicStack/asyncpg/issues/76
# we want to rely on pgbouncer
max_cached_statement_lifetime=0,
)
return self._conn
async def close(self):
if self._conn:
await self._conn.close()
self._conn = None
async def fetch(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.fetch(*args, **kwargs)
async def execute(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.execute(*args, **kwargs)
async def transaction(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return conn.transaction(*args, **kwargs)
|
Disable asyncpg prepared statement cache
|
Disable asyncpg prepared statement cache
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
python
|
## Code Before:
import asyncpg
class Database:
def __init__(self, host: str, port: int, user: str, password: str, database: str):
self.host = host
self.port = port
self.user = user
self.password = password
self.database = database
self._conn = None
async def connect(self):
self._conn = await asyncpg.connect(
host=self.host,
port=self.port,
user=self.user,
password=self.password,
database=self.database,
)
return self._conn
async def close(self):
if self._conn:
await self._conn.close()
self._conn = None
async def fetch(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.fetch(*args, **kwargs)
async def execute(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.execute(*args, **kwargs)
async def transaction(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return conn.transaction(*args, **kwargs)
## Instruction:
Disable asyncpg prepared statement cache
## Code After:
import asyncpg
class Database:
def __init__(self, host: str, port: int, user: str, password: str, database: str):
self.host = host
self.port = port
self.user = user
self.password = password
self.database = database
self._conn = None
async def connect(self):
self._conn = await asyncpg.connect(
host=self.host,
port=self.port,
user=self.user,
password=self.password,
database=self.database,
# https://github.com/MagicStack/asyncpg/issues/76
# we want to rely on pgbouncer
max_cached_statement_lifetime=0,
)
return self._conn
async def close(self):
if self._conn:
await self._conn.close()
self._conn = None
async def fetch(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.fetch(*args, **kwargs)
async def execute(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return await conn.execute(*args, **kwargs)
async def transaction(self, *args, **kwargs):
if not self._conn:
conn = await self.connect()
else:
conn = self._conn
return conn.transaction(*args, **kwargs)
|
// ... existing code ...
user=self.user,
password=self.password,
database=self.database,
# https://github.com/MagicStack/asyncpg/issues/76
# we want to rely on pgbouncer
max_cached_statement_lifetime=0,
)
return self._conn
// ... rest of the code ...
|
79d1ab43d187d8ba1350965673b930fa0b3879b6
|
rosbridge_suite/rosbridge_library/src/rosbridge_library/internal/pngcompression.py
|
rosbridge_suite/rosbridge_library/src/rosbridge_library/internal/pngcompression.py
|
from pypng.code import png
from base64 import standard_b64encode, standard_b64decode
from StringIO import StringIO
def encode(string):
""" PNG-compress the string, return the b64 encoded bytes """
bytes = list(bytearray(string))
png_image = png.from_array([bytes], 'L')
buff = StringIO()
png_image.save(buff)
encoded = standard_b64encode(buff.getvalue())
return encoded
def decode(string):
""" b64 decode the string, then PNG-decompress """
decoded = standard_b64decode(string)
reader = png.Reader(bytes=decoded)
width, height, rawpixels, metadata = reader.read()
pixels = list(rawpixels)[0]
return str(bytearray(pixels))
|
from pypng.code import png
from PIL import Image
from base64 import standard_b64encode, standard_b64decode
from StringIO import StringIO
def encode(string):
""" PNG-compress the string, return the b64 encoded bytes """
i = Image.fromstring('L', (len(string), 1), string)
buff = StringIO()
i.save(buff, "png")
encoded = standard_b64encode(buff.getvalue())
return encoded
def decode(string):
""" b64 decode the string, then PNG-decompress """
decoded = standard_b64decode(string)
reader = png.Reader(bytes=decoded)
width, height, rawpixels, metadata = reader.read()
pixels = list(rawpixels)[0]
return str(bytearray(pixels))
|
Use python imaging library to encode PNG instead of pypng
|
Use python imaging library to encode PNG instead of pypng
|
Python
|
bsd-3-clause
|
WangRobo/rosbridge_suite,vladrotea/rosbridge_suite,kbendick/rosbridge_suite,vladrotea/rosbridge_suite,RobotWebTools/rosbridge_suite,DLu/rosbridge_suite,SNU-Sigma/rosbridge_suite,DLu/rosbridge_suite,DLu/rosbridge_suite,mayfieldrobotics/rosbridge_suite,mayfieldrobotics/rosbridge_suite,WangRobo/rosbridge_suite,SNU-Sigma/rosbridge_suite,WangRobo/rosbridge_suite,kbendick/rosbridge_suite,SNU-Sigma/rosbridge_suite,kbendick/rosbridge_suite,SNU-Sigma/rosbridge_suite,vladrotea/rosbridge_suite,mayfieldrobotics/rosbridge_suite
|
python
|
## Code Before:
from pypng.code import png
from base64 import standard_b64encode, standard_b64decode
from StringIO import StringIO
def encode(string):
""" PNG-compress the string, return the b64 encoded bytes """
bytes = list(bytearray(string))
png_image = png.from_array([bytes], 'L')
buff = StringIO()
png_image.save(buff)
encoded = standard_b64encode(buff.getvalue())
return encoded
def decode(string):
""" b64 decode the string, then PNG-decompress """
decoded = standard_b64decode(string)
reader = png.Reader(bytes=decoded)
width, height, rawpixels, metadata = reader.read()
pixels = list(rawpixels)[0]
return str(bytearray(pixels))
## Instruction:
Use python imaging library to encode PNG instead of pypng
## Code After:
from pypng.code import png
from PIL import Image
from base64 import standard_b64encode, standard_b64decode
from StringIO import StringIO
def encode(string):
""" PNG-compress the string, return the b64 encoded bytes """
i = Image.fromstring('L', (len(string), 1), string)
buff = StringIO()
i.save(buff, "png")
encoded = standard_b64encode(buff.getvalue())
return encoded
def decode(string):
""" b64 decode the string, then PNG-decompress """
decoded = standard_b64decode(string)
reader = png.Reader(bytes=decoded)
width, height, rawpixels, metadata = reader.read()
pixels = list(rawpixels)[0]
return str(bytearray(pixels))
|
# ... existing code ...
from pypng.code import png
from PIL import Image
from base64 import standard_b64encode, standard_b64decode
from StringIO import StringIO
# ... modified code ...
def encode(string):
""" PNG-compress the string, return the b64 encoded bytes """
i = Image.fromstring('L', (len(string), 1), string)
buff = StringIO()
i.save(buff, "png")
encoded = standard_b64encode(buff.getvalue())
return encoded
# ... rest of the code ...
|
df18229b38a01d87076f3b13aee5bfd1f0f989c2
|
tunobase/blog/models.py
|
tunobase/blog/models.py
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
Update blog model with a more descriptive name
|
Update blog model with a more descriptive name
|
Python
|
bsd-3-clause
|
unomena/tunobase,unomena/tunobase
|
python
|
## Code Before:
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
## Instruction:
Update blog model with a more descriptive name
## Code After:
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
# ... existing code ...
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
# ... rest of the code ...
|
7127d138bacf507360b6b8c0386187d2e1be32a6
|
ifilter/__init__.py
|
ifilter/__init__.py
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
Add finally block for deletion of temp file
|
Add finally block for deletion of temp file
|
Python
|
apache-2.0
|
stefan-hudelmaier/ifilter
|
python
|
## Code Before:
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
## Instruction:
Add finally block for deletion of temp file
## Code After:
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
# ... existing code ...
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
# ... rest of the code ...
|
920bcddc0279ecaa62f489fe12d9514bf94698d7
|
subprojects/base-services/src/main/java/org/gradle/api/specs/OrSpec.java
|
subprojects/base-services/src/main/java/org/gradle/api/specs/OrSpec.java
|
/*
* Copyright 2007-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.specs;
import java.util.List;
/**
* A {@link CompositeSpec} which requires any one of its specs to be true in order to evaluate to
* true. Uses lazy evaluation.
*
* @param <T> The target type for this Spec
*/
public class OrSpec<T> extends CompositeSpec<T> {
public OrSpec(Spec<? super T>... specs) {
super(specs);
}
public OrSpec(Iterable<? extends Spec<? super T>> specs) {
super(specs);
}
public boolean isSatisfiedBy(T object) {
List<Integer> p = new ArrayList<>();
List<Spec<? super T>> specs = getSpecs();
if (specs.isEmpty()) {
return true;
}
for (Spec<? super T> spec : specs) {
if (spec.isSatisfiedBy(object)) {
return true;
}
}
return false;
}
}
|
/*
* Copyright 2007-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.specs;
import java.util.List;
/**
* A {@link CompositeSpec} which requires any one of its specs to be true in order to evaluate to
* true. Uses lazy evaluation.
*
* @param <T> The target type for this Spec
*/
public class OrSpec<T> extends CompositeSpec<T> {
public OrSpec(Spec<? super T>... specs) {
super(specs);
}
public OrSpec(Iterable<? extends Spec<? super T>> specs) {
super(specs);
}
public boolean isSatisfiedBy(T object) {
List<Spec<? super T>> specs = getSpecs();
if (specs.isEmpty()) {
return true;
}
for (Spec<? super T> spec : specs) {
if (spec.isSatisfiedBy(object)) {
return true;
}
}
return false;
}
}
|
Revert "Using java7+ feature to verify java6 sanity check"
|
Revert "Using java7+ feature to verify java6 sanity check"
This reverts commit 0ac6d15118c0240d27fe3efd76eed03956442bd0.
|
Java
|
apache-2.0
|
gradle/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,robinverduijn/gradle,gstevey/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,lsmaira/gradle
|
java
|
## Code Before:
/*
* Copyright 2007-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.specs;
import java.util.List;
/**
* A {@link CompositeSpec} which requires any one of its specs to be true in order to evaluate to
* true. Uses lazy evaluation.
*
* @param <T> The target type for this Spec
*/
public class OrSpec<T> extends CompositeSpec<T> {
public OrSpec(Spec<? super T>... specs) {
super(specs);
}
public OrSpec(Iterable<? extends Spec<? super T>> specs) {
super(specs);
}
public boolean isSatisfiedBy(T object) {
List<Integer> p = new ArrayList<>();
List<Spec<? super T>> specs = getSpecs();
if (specs.isEmpty()) {
return true;
}
for (Spec<? super T> spec : specs) {
if (spec.isSatisfiedBy(object)) {
return true;
}
}
return false;
}
}
## Instruction:
Revert "Using java7+ feature to verify java6 sanity check"
This reverts commit 0ac6d15118c0240d27fe3efd76eed03956442bd0.
## Code After:
/*
* Copyright 2007-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.specs;
import java.util.List;
/**
* A {@link CompositeSpec} which requires any one of its specs to be true in order to evaluate to
* true. Uses lazy evaluation.
*
* @param <T> The target type for this Spec
*/
public class OrSpec<T> extends CompositeSpec<T> {
public OrSpec(Spec<? super T>... specs) {
super(specs);
}
public OrSpec(Iterable<? extends Spec<? super T>> specs) {
super(specs);
}
public boolean isSatisfiedBy(T object) {
List<Spec<? super T>> specs = getSpecs();
if (specs.isEmpty()) {
return true;
}
for (Spec<? super T> spec : specs) {
if (spec.isSatisfiedBy(object)) {
return true;
}
}
return false;
}
}
|
// ... existing code ...
}
public boolean isSatisfiedBy(T object) {
List<Spec<? super T>> specs = getSpecs();
if (specs.isEmpty()) {
return true;
// ... rest of the code ...
|
550fedc513aab5feec3aaf43a49df5082a1e5dda
|
incuna_test_utils/testcases/urls.py
|
incuna_test_utils/testcases/urls.py
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
Add simple URLTestMixin and URLTestCase classes
|
Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
python
|
## Code Before:
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
## Instruction:
Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases
## Code After:
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
// ... existing code ...
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
// ... rest of the code ...
|
6c6d7e3dc2c61b13d17f30ddd7607a4dfb2ef86d
|
nova/policies/migrate_server.py
|
nova/policies/migrate_server.py
|
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-migrate-server:%s'
migrate_server_policies = [
policy.DocumentedRuleDefault(
POLICY_ROOT % 'migrate',
base.RULE_ADMIN_API,
"Cold migrate a server to a host",
[
{
'method': 'POST',
'path': '/servers/{server_id}/action (migrate)'
}
]),
policy.DocumentedRuleDefault(
POLICY_ROOT % 'migrate_live',
base.RULE_ADMIN_API,
"Live migrate a server to a new host without a reboot",
[
{
'method': 'POST',
'path': '/servers/{server_id}/action (os-migrateLive)'
}
]),
]
def list_rules():
return migrate_server_policies
|
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-migrate-server:%s'
migrate_server_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate',
check_str=base.RULE_ADMIN_API,
description="Cold migrate a server to a host",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (migrate)'
}
],
scope_types=['system', 'project']),
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate_live',
check_str=base.RULE_ADMIN_API,
description="Live migrate a server to a new host without a reboot",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (os-migrateLive)'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return migrate_server_policies
|
Introduce scope_types in migrate server
|
Introduce scope_types in migrate server
oslo.policy introduced the scope_type feature which can
control the access level at system-level and project-level.
- https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope
- http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for migrate server API policies
as 'system'.
Also adds the test case with scope_type enabled and verify we
pass and fail the policy check with expected context.
Partial implement blueprint policy-defaults-refresh
Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be
|
Python
|
apache-2.0
|
klmitch/nova,openstack/nova,openstack/nova,mahak/nova,klmitch/nova,mahak/nova,klmitch/nova,openstack/nova,mahak/nova,klmitch/nova
|
python
|
## Code Before:
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-migrate-server:%s'
migrate_server_policies = [
policy.DocumentedRuleDefault(
POLICY_ROOT % 'migrate',
base.RULE_ADMIN_API,
"Cold migrate a server to a host",
[
{
'method': 'POST',
'path': '/servers/{server_id}/action (migrate)'
}
]),
policy.DocumentedRuleDefault(
POLICY_ROOT % 'migrate_live',
base.RULE_ADMIN_API,
"Live migrate a server to a new host without a reboot",
[
{
'method': 'POST',
'path': '/servers/{server_id}/action (os-migrateLive)'
}
]),
]
def list_rules():
return migrate_server_policies
## Instruction:
Introduce scope_types in migrate server
oslo.policy introduced the scope_type feature which can
control the access level at system-level and project-level.
- https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope
- http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for migrate server API policies
as 'system'.
Also adds the test case with scope_type enabled and verify we
pass and fail the policy check with expected context.
Partial implement blueprint policy-defaults-refresh
Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be
## Code After:
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-migrate-server:%s'
migrate_server_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate',
check_str=base.RULE_ADMIN_API,
description="Cold migrate a server to a host",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (migrate)'
}
],
scope_types=['system', 'project']),
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate_live',
check_str=base.RULE_ADMIN_API,
description="Live migrate a server to a new host without a reboot",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (os-migrateLive)'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return migrate_server_policies
|
// ... existing code ...
migrate_server_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate',
check_str=base.RULE_ADMIN_API,
description="Cold migrate a server to a host",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (migrate)'
}
],
scope_types=['system', 'project']),
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'migrate_live',
check_str=base.RULE_ADMIN_API,
description="Live migrate a server to a new host without a reboot",
operations=[
{
'method': 'POST',
'path': '/servers/{server_id}/action (os-migrateLive)'
}
],
scope_types=['system', 'project']),
]
// ... rest of the code ...
|
b14ec035f6a4890ce85504f449402aec857227fe
|
cla_backend/apps/status/tests/smoketests.py
|
cla_backend/apps/status/tests/smoketests.py
|
import unittest
from celery import Celery
from django.conf import settings
from django.db import connection
class SmokeTests(unittest.TestCase):
def setUp(self):
pass
def test_can_access_db(self):
"access the database"
cursor = connection.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
self.assertEqual(1, row[0])
def test_can_access_celery(self):
"connect to SQS"
if not getattr(settings, 'CELERY_ALWAYS_EAGER', False):
conn = Celery('cla_backend').connection()
conn.config_from_object('django.conf:settings')
conn.connect()
conn.release()
|
import unittest
from celery import Celery
from django.conf import settings
from django.db import connection
class SmokeTests(unittest.TestCase):
def setUp(self):
pass
def test_can_access_db(self):
"access the database"
cursor = connection.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
self.assertEqual(1, row[0])
def test_can_access_celery(self):
"connect to SQS"
if not getattr(settings, 'CELERY_ALWAYS_EAGER', False):
app = Celery('cla_backend')
app.config_from_object('django.conf:settings')
app.connection().connect()
conn.connect()
conn.release()
|
Configure Celery correctly in smoketest
|
Configure Celery correctly in smoketest
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
python
|
## Code Before:
import unittest
from celery import Celery
from django.conf import settings
from django.db import connection
class SmokeTests(unittest.TestCase):
def setUp(self):
pass
def test_can_access_db(self):
"access the database"
cursor = connection.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
self.assertEqual(1, row[0])
def test_can_access_celery(self):
"connect to SQS"
if not getattr(settings, 'CELERY_ALWAYS_EAGER', False):
conn = Celery('cla_backend').connection()
conn.config_from_object('django.conf:settings')
conn.connect()
conn.release()
## Instruction:
Configure Celery correctly in smoketest
## Code After:
import unittest
from celery import Celery
from django.conf import settings
from django.db import connection
class SmokeTests(unittest.TestCase):
def setUp(self):
pass
def test_can_access_db(self):
"access the database"
cursor = connection.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
self.assertEqual(1, row[0])
def test_can_access_celery(self):
"connect to SQS"
if not getattr(settings, 'CELERY_ALWAYS_EAGER', False):
app = Celery('cla_backend')
app.config_from_object('django.conf:settings')
app.connection().connect()
conn.connect()
conn.release()
|
...
def test_can_access_celery(self):
"connect to SQS"
if not getattr(settings, 'CELERY_ALWAYS_EAGER', False):
app = Celery('cla_backend')
app.config_from_object('django.conf:settings')
app.connection().connect()
conn.connect()
conn.release()
...
|
293f44e211e4f26a0b7eca842dd2af515957a4bd
|
octavia/certificates/generator/cert_gen.py
|
octavia/certificates/generator/cert_gen.py
|
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: Signed certificate
:raises Exception: If certificate signing fails
"""
pass
|
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for generating private keys,
generating CSRs, and signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: PEM Encoded Signed certificate
:raises Exception: If certificate signing fails
"""
pass
@abc.abstractmethod
def generate_cert_key_pair(self, cn, validity, bit_length, passphrase):
"""Generates a private key and certificate pair
:param cn: Common name to use for the Certificate
:param validity: Validity period for the Certificate
:param bit_length: Private key bit length
:param passphrase: Passphrase to use for encrypting the private key
:return: octavia.certificates.common.Cert representation of the
certificate data
:raises Exception: If generation fails
"""
pass
|
Add Cert+PK generation to Certificate Interface
|
Add Cert+PK generation to Certificate Interface
Change-Id: I82aa573c7db13c7a491b18540379b234c1023eb9
|
Python
|
apache-2.0
|
openstack/octavia,openstack/octavia,openstack/octavia
|
python
|
## Code Before:
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: Signed certificate
:raises Exception: If certificate signing fails
"""
pass
## Instruction:
Add Cert+PK generation to Certificate Interface
Change-Id: I82aa573c7db13c7a491b18540379b234c1023eb9
## Code After:
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for generating private keys,
generating CSRs, and signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: PEM Encoded Signed certificate
:raises Exception: If certificate signing fails
"""
pass
@abc.abstractmethod
def generate_cert_key_pair(self, cn, validity, bit_length, passphrase):
"""Generates a private key and certificate pair
:param cn: Common name to use for the Certificate
:param validity: Validity period for the Certificate
:param bit_length: Private key bit length
:param passphrase: Passphrase to use for encrypting the private key
:return: octavia.certificates.common.Cert representation of the
certificate data
:raises Exception: If generation fails
"""
pass
|
...
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for generating private keys,
generating CSRs, and signing TLS certificates.
"""
@abc.abstractmethod
...
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: PEM Encoded Signed certificate
:raises Exception: If certificate signing fails
"""
pass
@abc.abstractmethod
def generate_cert_key_pair(self, cn, validity, bit_length, passphrase):
"""Generates a private key and certificate pair
:param cn: Common name to use for the Certificate
:param validity: Validity period for the Certificate
:param bit_length: Private key bit length
:param passphrase: Passphrase to use for encrypting the private key
:return: octavia.certificates.common.Cert representation of the
certificate data
:raises Exception: If generation fails
"""
pass
...
|
113fe8c84d7aff1577a9fadf8fa4650a31ea9307
|
src/dataIO.py
|
src/dataIO.py
|
import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
|
import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
|
Add off file reader with 3d resampling
|
Add off file reader with 3d resampling
|
Python
|
mit
|
meetshah1995/tf-3dgan
|
python
|
## Code Before:
import numpy as np
def testOFFReader():
path = '../sample-data/chair.off'
raw_data = tuple(open(path, 'r'))
header = raw_data.strip(' ')[:-1]
n_vertices, n_faces = header[0], header[1]
if __name__ == '__main__':
a = testOFFReader()
print a
## Instruction:
Add off file reader with 3d resampling
## Code After:
import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
|
# ... existing code ...
import trimesh
import sys
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from stl import mesh
from mpl_toolkits import mplot3d
def getVerticesFaces(path):
raw_data = tuple(open(path, 'r'))
header = raw_data[1].split()
n_vertices = int(header[0])
n_faces = int(header[1])
vertices = np.asarray([map(float,raw_data[i+2].split()) for i in range(n_vertices)])
faces = np.asarray([map(int,raw_data[i+2+n_vertices].split()) for i in range(n_faces)])
return vertices, faces
def plot(vertices, faces):
input_vec = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
input_vec.vectors[i][j] = vertices[f[j],:]
figure = plt.figure()
axes = mplot3d.Axes3D(figure)
axes.add_collection3d(mplot3d.art3d.Poly3DCollection(input_vec.vectors))
scale = input_vec.points.flatten(-1)
axes.auto_scale_xyz(scale, scale, scale)
plt.show()
def binaryPlot(voxels):
z,x,y = voxels.nonzero()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(x, y, -z, zdir='z', c= 'red')
plt.show()
def discretePlot(vertices):
figure = pyplot.figure()
axes = mplot3d.Axes3D(figure)
axes.scatter(vertices.T[0,:],vertices.T[1,:],vertices.T[2,:])
pyplot.show()
def getVolume(path, sideLen=32):
mesh = trimesh.load(path)
volume = trimesh.voxel.Voxel(mesh, 0.5).raw
(x, y, z) = map(float, volume.shape)
volume = nd.zoom(volume.astype(float),
(sideLen/x, sideLen/y, sideLen/z),
order=1,
mode='nearest')
volume[np.nonzero(volume)] = 1.0
return volume.astype(np.bool)
if __name__ == '__main__':
path = sys.argv[1]
volume = getVolume(path)
binaryPlot(volume)
# ... rest of the code ...
|
bf70f8e3235c140589e9b0110b34da8427ab409b
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Fix res returned on delegate
|
Fix res returned on delegate
|
Python
|
agpl-3.0
|
MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
|
python
|
## Code Before:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
## Instruction:
Fix res returned on delegate
## Code After:
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
...
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
...
|
7ae78b4098bf1851ab97080c7b29ec2a81eff675
|
src/java/nxt/db/DerivedDbTable.java
|
src/java/nxt/db/DerivedDbTable.java
|
package nxt.db;
import nxt.Nxt;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
public abstract class DerivedDbTable {
protected final String table;
protected DerivedDbTable(String table) {
this.table = table;
Nxt.getBlockchainProcessor().registerDerivedTable(this);
}
public void rollback(int height) {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
PreparedStatement pstmtDelete = con.prepareStatement("DELETE FROM " + table + " WHERE height > ?")) {
pstmtDelete.setInt(1, height);
pstmtDelete.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void truncate() {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
Statement stmt = con.createStatement()) {
stmt.executeUpdate("TRUNCATE TABLE " + table);
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void trim(int height) {
//nothing to trim
}
public void finish() {
}
}
|
package nxt.db;
import nxt.Nxt;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
public abstract class DerivedDbTable {
protected final String table;
protected DerivedDbTable(String table) {
this.table = table;
Nxt.getBlockchainProcessor().registerDerivedTable(this);
}
public void rollback(int height) {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
PreparedStatement pstmtDelete = con.prepareStatement("DELETE FROM " + table + " WHERE height > ?")) {
pstmtDelete.setInt(1, height);
pstmtDelete.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void truncate() {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
Statement stmt = con.createStatement()) {
stmt.executeUpdate("DELETE FROM " + table);
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void trim(int height) {
//nothing to trim
}
public void finish() {
}
}
|
Change truncate to delete from in db abstraction. Truncate is not getting rolled back in the new h2 db version, leaving things in a dirty state after failed block application.
|
Change truncate to delete from in db abstraction. Truncate is not getting rolled back in the new h2 db version, leaving things in a dirty state after failed block application.
|
Java
|
mit
|
burst-team/burstcoin,burst-team/burstcoin,burst-team/burstcoin,burst-team/burstcoin,burst-team/burstcoin
|
java
|
## Code Before:
package nxt.db;
import nxt.Nxt;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
public abstract class DerivedDbTable {
protected final String table;
protected DerivedDbTable(String table) {
this.table = table;
Nxt.getBlockchainProcessor().registerDerivedTable(this);
}
public void rollback(int height) {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
PreparedStatement pstmtDelete = con.prepareStatement("DELETE FROM " + table + " WHERE height > ?")) {
pstmtDelete.setInt(1, height);
pstmtDelete.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void truncate() {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
Statement stmt = con.createStatement()) {
stmt.executeUpdate("TRUNCATE TABLE " + table);
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void trim(int height) {
//nothing to trim
}
public void finish() {
}
}
## Instruction:
Change truncate to delete from in db abstraction. Truncate is not getting rolled back in the new h2 db version, leaving things in a dirty state after failed block application.
## Code After:
package nxt.db;
import nxt.Nxt;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
public abstract class DerivedDbTable {
protected final String table;
protected DerivedDbTable(String table) {
this.table = table;
Nxt.getBlockchainProcessor().registerDerivedTable(this);
}
public void rollback(int height) {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
PreparedStatement pstmtDelete = con.prepareStatement("DELETE FROM " + table + " WHERE height > ?")) {
pstmtDelete.setInt(1, height);
pstmtDelete.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void truncate() {
if (!Db.isInTransaction()) {
throw new IllegalStateException("Not in transaction");
}
try (Connection con = Db.getConnection();
Statement stmt = con.createStatement()) {
stmt.executeUpdate("DELETE FROM " + table);
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
}
public void trim(int height) {
//nothing to trim
}
public void finish() {
}
}
|
# ... existing code ...
}
try (Connection con = Db.getConnection();
Statement stmt = con.createStatement()) {
stmt.executeUpdate("DELETE FROM " + table);
} catch (SQLException e) {
throw new RuntimeException(e.toString(), e);
}
# ... rest of the code ...
|
db8e02661df65e1a50c5810968afef7ecd44db42
|
braid/bazaar.py
|
braid/bazaar.py
|
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
# FIXME (https://github.com/twisted-infra/braid/issues/5)
# We currently don't check that this the correct branch
run('bzr update {}'.format(location))
|
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
run('bzr pull --overwrite -d {} {}'.format(location, branch))
|
Make bzr always pull from the specified remote.
|
Make bzr always pull from the specified remote.
Refs: #5.
|
Python
|
mit
|
alex/braid,alex/braid
|
python
|
## Code Before:
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
# FIXME (https://github.com/twisted-infra/braid/issues/5)
# We currently don't check that this the correct branch
run('bzr update {}'.format(location))
## Instruction:
Make bzr always pull from the specified remote.
Refs: #5.
## Code After:
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
run('bzr pull --overwrite -d {} {}'.format(location, branch))
|
...
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
run('bzr pull --overwrite -d {} {}'.format(location, branch))
...
|
5813dfab08a4fd763269af27100486d203071684
|
ktor-samples/ktor-samples-async/src/org/jetbrains/ktor/samples/async/AsyncApplication.kt
|
ktor-samples/ktor-samples-async/src/org/jetbrains/ktor/samples/async/AsyncApplication.kt
|
package org.jetbrains.ktor.samples.async
import kotlinx.html.*
import kotlinx.html.stream.*
import org.jetbrains.ktor.application.*
import org.jetbrains.ktor.http.*
import org.jetbrains.ktor.routing.*
import java.util.*
import java.util.concurrent.*
import kotlin.util.*
class AsyncApplication(config: ApplicationConfig) : Application(config) {
val executor: ScheduledExecutorService by lazy { Executors.newScheduledThreadPool(4) }
init {
routing {
get("/") {
executor.submit { handleLongCalculation() }
ApplicationRequestStatus.Asynchronous
}
get("/bye") {
response.sendText("Goodbye World!")
}
}
}
private fun ApplicationRequestContext.handleLongCalculation() {
var number = 0
val random = Random()
val time = measureTimeMillis {
for (index in 0..300) {
Thread.sleep(10)
number += random.nextInt(100)
}
}
response.contentType(ContentType.Text.Html)
response.write {
appendHTML().html {
head {
title { +"Async World" }
}
body {
h1 {
+"We calculated this after ${time}ms: $number"
}
}
}
}
close()
}
}
|
package org.jetbrains.ktor.samples.async
import kotlinx.html.*
import kotlinx.html.stream.*
import org.jetbrains.ktor.application.*
import org.jetbrains.ktor.http.*
import org.jetbrains.ktor.routing.*
import java.util.*
import java.util.concurrent.*
class AsyncApplication(config: ApplicationConfig) : Application(config) {
val executor: ScheduledExecutorService by lazy { Executors.newScheduledThreadPool(4) }
init {
routing {
get("/{...}") {
val start = System.currentTimeMillis()
executor.submit { handleLongCalculation(start) }
ApplicationRequestStatus.Asynchronous
}
}
}
private fun ApplicationRequestContext.handleLongCalculation(start: Long) {
val queue = System.currentTimeMillis() - start
var number = 0
val random = Random()
for (index in 0..300) {
Thread.sleep(10)
number += random.nextInt(100)
}
val time = System.currentTimeMillis() - start
response.contentType(ContentType.Text.Html)
response.write {
appendHTML().html {
head {
title { +"Async World" }
}
body {
h1 {
+"We calculated this after ${time}ms (${queue}ms in queue): $number"
}
}
}
}
close()
}
}
|
Improve Async sample with possibility to test it from browser and report time in queue.
|
Improve Async sample with possibility to test it from browser and report time in queue.
|
Kotlin
|
apache-2.0
|
ktorio/ktor,ktorio/ktor,ktorio/ktor,ktorio/ktor
|
kotlin
|
## Code Before:
package org.jetbrains.ktor.samples.async
import kotlinx.html.*
import kotlinx.html.stream.*
import org.jetbrains.ktor.application.*
import org.jetbrains.ktor.http.*
import org.jetbrains.ktor.routing.*
import java.util.*
import java.util.concurrent.*
import kotlin.util.*
class AsyncApplication(config: ApplicationConfig) : Application(config) {
val executor: ScheduledExecutorService by lazy { Executors.newScheduledThreadPool(4) }
init {
routing {
get("/") {
executor.submit { handleLongCalculation() }
ApplicationRequestStatus.Asynchronous
}
get("/bye") {
response.sendText("Goodbye World!")
}
}
}
private fun ApplicationRequestContext.handleLongCalculation() {
var number = 0
val random = Random()
val time = measureTimeMillis {
for (index in 0..300) {
Thread.sleep(10)
number += random.nextInt(100)
}
}
response.contentType(ContentType.Text.Html)
response.write {
appendHTML().html {
head {
title { +"Async World" }
}
body {
h1 {
+"We calculated this after ${time}ms: $number"
}
}
}
}
close()
}
}
## Instruction:
Improve Async sample with possibility to test it from browser and report time in queue.
## Code After:
package org.jetbrains.ktor.samples.async
import kotlinx.html.*
import kotlinx.html.stream.*
import org.jetbrains.ktor.application.*
import org.jetbrains.ktor.http.*
import org.jetbrains.ktor.routing.*
import java.util.*
import java.util.concurrent.*
class AsyncApplication(config: ApplicationConfig) : Application(config) {
val executor: ScheduledExecutorService by lazy { Executors.newScheduledThreadPool(4) }
init {
routing {
get("/{...}") {
val start = System.currentTimeMillis()
executor.submit { handleLongCalculation(start) }
ApplicationRequestStatus.Asynchronous
}
}
}
private fun ApplicationRequestContext.handleLongCalculation(start: Long) {
val queue = System.currentTimeMillis() - start
var number = 0
val random = Random()
for (index in 0..300) {
Thread.sleep(10)
number += random.nextInt(100)
}
val time = System.currentTimeMillis() - start
response.contentType(ContentType.Text.Html)
response.write {
appendHTML().html {
head {
title { +"Async World" }
}
body {
h1 {
+"We calculated this after ${time}ms (${queue}ms in queue): $number"
}
}
}
}
close()
}
}
|
...
import org.jetbrains.ktor.routing.*
import java.util.*
import java.util.concurrent.*
class AsyncApplication(config: ApplicationConfig) : Application(config) {
val executor: ScheduledExecutorService by lazy { Executors.newScheduledThreadPool(4) }
...
init {
routing {
get("/{...}") {
val start = System.currentTimeMillis()
executor.submit { handleLongCalculation(start) }
ApplicationRequestStatus.Asynchronous
}
}
}
private fun ApplicationRequestContext.handleLongCalculation(start: Long) {
val queue = System.currentTimeMillis() - start
var number = 0
val random = Random()
for (index in 0..300) {
Thread.sleep(10)
number += random.nextInt(100)
}
val time = System.currentTimeMillis() - start
response.contentType(ContentType.Text.Html)
response.write {
...
}
body {
h1 {
+"We calculated this after ${time}ms (${queue}ms in queue): $number"
}
}
}
...
|
b097675e5906f7b0e9c050110fea58e40491814b
|
music/api.py
|
music/api.py
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
Allow filtering and ordering on API
|
Allow filtering and ordering on API
|
Python
|
bsd-3-clause
|
praekelt/jmbo-music,praekelt/jmbo-music
|
python
|
## Code Before:
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
## Instruction:
Allow filtering and ordering on API
## Code After:
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
class TrackResource(ModelBaseResource):
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<slug>[\w-]+)/$" % self._meta.resource_name, self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
|
...
from django.conf.urls.defaults import url
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from jmbo.api import ModelBaseResource
from music.models import Track
...
class Meta:
queryset = Track.permitted.all()
resource_name = 'track'
filtering = {
'last_played': ALL
}
ordering = ['last_played']
def override_urls(self):
...
|
8b628ce91040736e1cb33a544871925608c70479
|
penchy/jobs/dependency.py
|
penchy/jobs/dependency.py
|
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
|
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topological sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topological sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
Add edgesort as a frontend to topological_sort.
|
Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <[email protected]>
|
Python
|
mit
|
fhirschmann/penchy,fhirschmann/penchy
|
python
|
## Code Before:
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
## Instruction:
Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <[email protected]>
## Code After:
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topological sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topological sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
# ... existing code ...
from penchy.util import topological_sort
class Edge(object):
# ... modified code ...
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topological sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topological sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
# ... rest of the code ...
|
5f9c2e6d0531284903bcfe797158b95941d7a946
|
shims/test/hadoop23/org/apache/pig/test/SparkMiniCluster.java
|
shims/test/hadoop23/org/apache/pig/test/SparkMiniCluster.java
|
package org.apache.pig.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.pig.ExecType;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkExecType;
public class SparkMiniCluster extends MiniGenericCluster {
private static final File CONF_DIR = new File("build/classes");
private static final File CONF_FILE = new File(CONF_DIR, "hadoop-site.xml");
private ExecType spark = new SparkExecType();
SparkMiniCluster() {
}
@Override
public ExecType getExecType() {
return spark;
}
@Override
protected void setupMiniDfsAndMrClusters() {
try {
CONF_DIR.mkdirs();
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
m_conf = new Configuration();
m_conf.set("io.sort.mb", "1");
m_conf.writeXml(new FileOutputStream(CONF_FILE));
int dataNodes = 4;
m_dfs = new MiniDFSCluster(m_conf, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void shutdownMiniMrClusters() {
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
}
}
|
package org.apache.pig.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.pig.ExecType;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkExecType;
public class SparkMiniCluster extends MiniGenericCluster {
private static final File CONF_DIR = new File("build/classes");
private static final File CONF_FILE = new File(CONF_DIR, "hadoop-site.xml");
private ExecType spark = new SparkExecType();
SparkMiniCluster() {
}
@Override
public ExecType getExecType() {
return spark;
}
@Override
protected void setupMiniDfsAndMrClusters() {
try {
CONF_DIR.mkdirs();
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
m_conf = new Configuration();
m_conf.set("io.sort.mb", "1");
m_conf.writeXml(new FileOutputStream(CONF_FILE));
int dataNodes = 4;
m_dfs = new MiniDFSCluster(m_conf, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
m_fileSys.mkdirs(m_fileSys.getWorkingDirectory());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void shutdownMiniMrClusters() {
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
}
}
|
Enable unit test "TestStreamingUDF" in spark (liyunzhang via praveen)
|
PIG-4268: Enable unit test "TestStreamingUDF" in spark (liyunzhang via praveen)
git-svn-id: d317905e1b1233abb7022f5914f79c3119e04b87@1645891 13f79535-47bb-0310-9956-ffa450edef68
|
Java
|
apache-2.0
|
kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig,kellyzly/pig
|
java
|
## Code Before:
package org.apache.pig.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.pig.ExecType;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkExecType;
public class SparkMiniCluster extends MiniGenericCluster {
private static final File CONF_DIR = new File("build/classes");
private static final File CONF_FILE = new File(CONF_DIR, "hadoop-site.xml");
private ExecType spark = new SparkExecType();
SparkMiniCluster() {
}
@Override
public ExecType getExecType() {
return spark;
}
@Override
protected void setupMiniDfsAndMrClusters() {
try {
CONF_DIR.mkdirs();
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
m_conf = new Configuration();
m_conf.set("io.sort.mb", "1");
m_conf.writeXml(new FileOutputStream(CONF_FILE));
int dataNodes = 4;
m_dfs = new MiniDFSCluster(m_conf, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void shutdownMiniMrClusters() {
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
}
}
## Instruction:
PIG-4268: Enable unit test "TestStreamingUDF" in spark (liyunzhang via praveen)
git-svn-id: d317905e1b1233abb7022f5914f79c3119e04b87@1645891 13f79535-47bb-0310-9956-ffa450edef68
## Code After:
package org.apache.pig.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.pig.ExecType;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkExecType;
public class SparkMiniCluster extends MiniGenericCluster {
private static final File CONF_DIR = new File("build/classes");
private static final File CONF_FILE = new File(CONF_DIR, "hadoop-site.xml");
private ExecType spark = new SparkExecType();
SparkMiniCluster() {
}
@Override
public ExecType getExecType() {
return spark;
}
@Override
protected void setupMiniDfsAndMrClusters() {
try {
CONF_DIR.mkdirs();
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
m_conf = new Configuration();
m_conf.set("io.sort.mb", "1");
m_conf.writeXml(new FileOutputStream(CONF_FILE));
int dataNodes = 4;
m_dfs = new MiniDFSCluster(m_conf, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
m_fileSys.mkdirs(m_fileSys.getWorkingDirectory());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void shutdownMiniMrClusters() {
if (CONF_FILE.exists()) {
CONF_FILE.delete();
}
}
}
|
# ... existing code ...
int dataNodes = 4;
m_dfs = new MiniDFSCluster(m_conf, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
m_fileSys.mkdirs(m_fileSys.getWorkingDirectory());
} catch (IOException e) {
throw new RuntimeException(e);
# ... rest of the code ...
|
fdf05b0fa93c350d2cd030e451b0e26ed7393209
|
tests/clientlib/validate_manifest_test.py
|
tests/clientlib/validate_manifest_test.py
|
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
|
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected
|
Add better tests for manifest json schema
|
Add better tests for manifest json schema
|
Python
|
mit
|
chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,beni55/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Lucas-C/pre-commit,Lucas-C/pre-commit,dnephin/pre-commit,philipgian/pre-commit,dnephin/pre-commit,Teino1978-Corp/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,dnephin/pre-commit,Teino1978-Corp/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,beni55/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,Lucas-C/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,beni55/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,Lucas-C/pre-commit,beni55/pre-commit,barrysteyn/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,pre-commit/pre-commit,pre-commit/pre-commit,barrysteyn/pre-commit,chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,dnephin/pre-commit,pre-commit/pre-commit
|
python
|
## Code Before:
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
## Instruction:
Add better tests for manifest json schema
## Code After:
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected
|
// ... existing code ...
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
// ... modified code ...
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected
// ... rest of the code ...
|
914aa3e86b039d935ae322bf2c0b654dd9de42ad
|
KugelmatikFirmware/config.h
|
KugelmatikFirmware/config.h
|
enum StepMode : uint8_t {
StepHalf = 1,
StepFull = 2,
StepBoth = 3
};
enum BrakeMode : uint8_t {
BrakeNone = 0,
BrakeAlways = 1,
BrakeSmart = 2
};
struct Config {
StepMode stepMode;
BrakeMode brakeMode;
uint32_t tickTime;
uint32_t homeTime;
uint32_t fixTime;
int16_t maxSteps; // Maximale Anzahl an Schritten die die Firmware maximal machen darf (nach unten)
int16_t homeSteps;
int16_t fixSteps; // Anzahl an Schritten die die Firmware macht um eine Kugel nach unten zu fahren (ignoriert dabei maxSteps)
uint16_t brakeTicks;
uint16_t minStepDelta; // Unterschied zwischen derzeitiger Hhe und Zielhhe ab wann die Kugel bewegt werden soll
};
extern Config config;
// setzt die Standard Config
void setDefaultConfig();
// prft Config auf invalide Werte
boolean checkConfig(Config* config);
|
enum StepMode : uint8_t {
StepHalf = 1,
StepFull = 2,
StepBoth = 3
};
enum BrakeMode : uint8_t {
BrakeNone = 0,
BrakeAlways = 1,
BrakeSmart = 2
};
struct Config {
StepMode stepMode;
BrakeMode brakeMode;
uint32_t tickTime;
uint32_t homeTime;
uint32_t fixTime;
int16_t maxSteps; // Maximale Anzahl an Schritten die die Firmware maximal machen darf (nach unten)
int16_t homeSteps;
int16_t fixSteps; // Anzahl an Schritten die die Firmware macht um eine Kugel nach unten zu fahren (ignoriert dabei maxSteps)
uint16_t brakeTicks;
uint16_t minStepDelta; // Unterschied zwischen derzeitiger Hhe und Zielhhe ab wann die Kugel bewegt werden soll
} __attribute__((__packed__));
extern Config config;
// setzt die Standard Config
void setDefaultConfig();
// prft Config auf invalide Werte
boolean checkConfig(Config* config);
|
Add packed attribute to Config
|
Add packed attribute to Config
|
C
|
mit
|
henrik1235/Kugelmatik,henrik1235/Kugelmatik,henrik1235/Kugelmatik
|
c
|
## Code Before:
enum StepMode : uint8_t {
StepHalf = 1,
StepFull = 2,
StepBoth = 3
};
enum BrakeMode : uint8_t {
BrakeNone = 0,
BrakeAlways = 1,
BrakeSmart = 2
};
struct Config {
StepMode stepMode;
BrakeMode brakeMode;
uint32_t tickTime;
uint32_t homeTime;
uint32_t fixTime;
int16_t maxSteps; // Maximale Anzahl an Schritten die die Firmware maximal machen darf (nach unten)
int16_t homeSteps;
int16_t fixSteps; // Anzahl an Schritten die die Firmware macht um eine Kugel nach unten zu fahren (ignoriert dabei maxSteps)
uint16_t brakeTicks;
uint16_t minStepDelta; // Unterschied zwischen derzeitiger Hhe und Zielhhe ab wann die Kugel bewegt werden soll
};
extern Config config;
// setzt die Standard Config
void setDefaultConfig();
// prft Config auf invalide Werte
boolean checkConfig(Config* config);
## Instruction:
Add packed attribute to Config
## Code After:
enum StepMode : uint8_t {
StepHalf = 1,
StepFull = 2,
StepBoth = 3
};
enum BrakeMode : uint8_t {
BrakeNone = 0,
BrakeAlways = 1,
BrakeSmart = 2
};
struct Config {
StepMode stepMode;
BrakeMode brakeMode;
uint32_t tickTime;
uint32_t homeTime;
uint32_t fixTime;
int16_t maxSteps; // Maximale Anzahl an Schritten die die Firmware maximal machen darf (nach unten)
int16_t homeSteps;
int16_t fixSteps; // Anzahl an Schritten die die Firmware macht um eine Kugel nach unten zu fahren (ignoriert dabei maxSteps)
uint16_t brakeTicks;
uint16_t minStepDelta; // Unterschied zwischen derzeitiger Hhe und Zielhhe ab wann die Kugel bewegt werden soll
} __attribute__((__packed__));
extern Config config;
// setzt die Standard Config
void setDefaultConfig();
// prft Config auf invalide Werte
boolean checkConfig(Config* config);
|
# ... existing code ...
uint16_t brakeTicks;
uint16_t minStepDelta; // Unterschied zwischen derzeitiger Hhe und Zielhhe ab wann die Kugel bewegt werden soll
} __attribute__((__packed__));
extern Config config;
# ... rest of the code ...
|
52d32849f4cd38ca7a0fcfc0418e9e9580dd426a
|
kimochiconsumer/views.py
|
kimochiconsumer/views.py
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
Use the gallery_image method for required information
|
Use the gallery_image method for required information
|
Python
|
mit
|
matslindh/kimochi-consumer
|
python
|
## Code Before:
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
## Instruction:
Use the gallery_image method for required information
## Code After:
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
    """Render a page, defaulting to page '1' when no id is supplied.

    Registered for both the bare 'page' route and 'page_view' (which
    carries a 'page_id' matchdict entry).
    """
    if 'page_id' in request.matchdict:
        data = request.kimochi.page(request.matchdict['page_id'])
    else:
        # No explicit page requested; fall back to page '1'.
        data = request.kimochi.page('1')

    return data


@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
    """Render a gallery; 404 when the backend returns no gallery data."""
    data = request.kimochi.gallery(request.matchdict['gallery_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data


@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
    """Render a single gallery image; 404 when the gallery is missing."""
    # Uses gallery_image() rather than gallery() so the response carries the
    # image-specific information required by the template (per commit note).
    data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data
|
...
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
...
|
aef238386c71d52def424c8f47a103bd25f12e26
|
server/proposal/migrations/0034_fix_updated.py
|
server/proposal/migrations/0034_fix_updated.py
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
|
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
Make fix_updated migration (sort of) reversible
|
Make fix_updated migration (sort of) reversible
|
Python
|
mit
|
cityofsomerville/citydash,cityofsomerville/citydash,codeforboston/cornerwise,codeforboston/cornerwise,codeforboston/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/cornerwise,cityofsomerville/cornerwise
|
python
|
## Code Before:
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated),
]
## Instruction:
Make fix_updated migration (sort of) reversible
## Code After:
import django.contrib.gis.db.models.fields
from django.db import migrations
from django.contrib.gis.db.models import Max
def fix_updated(apps, _):
Proposal = apps.get_model("proposal", "Proposal")
proposals = Proposal.objects.annotate(published=Max("documents__published"))
for proposal in proposals:
if proposal.published:
proposal.updated = proposal.published
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
|
// ... existing code ...
proposal.save()
def do_nothing(apps, _):
pass
class Migration(migrations.Migration):
dependencies = [
('proposal', '0033_non_null_started'),
// ... modified code ...
]
operations = [
migrations.RunPython(fix_updated, do_nothing),
]
// ... rest of the code ...
|
4b84cedd15a2774391544a6edee3532e5e267608
|
tests/docs/test_docs.py
|
tests/docs/test_docs.py
|
import subprocess
import unittest
import os
import subprocess
import unittest
import os
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, filename = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ['docs']
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "linkcheck"])
print(response.returncode)
self.assertTrue(response.returncode == 0)
os.chdir(wd)
if __name__ == '__main__':
unittest.main()
|
import subprocess
import unittest
import os
import subprocess
import unittest
import os
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, filename = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ['docs']
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "html"], shell=True) # Needed for local test on Windows
# self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "linkcheck"])
print(response.returncode)
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "linkcheck"], shell=True) # Needed for local test on Windows
# print(response)
# self.assertTrue(response == 0)
os.chdir(wd)
if __name__ == '__main__':
unittest.main()
|
Edit docs test for local test on windows machine
|
Edit docs test for local test on windows machine
|
Python
|
mit
|
simpeg/simpeg
|
python
|
## Code Before:
import subprocess
import unittest
import os
import subprocess
import unittest
import os
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, filename = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ['docs']
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "linkcheck"])
print(response.returncode)
self.assertTrue(response.returncode == 0)
os.chdir(wd)
if __name__ == '__main__':
unittest.main()
## Instruction:
Edit docs test for local test on windows machine
## Code After:
import subprocess
import unittest
import os
import subprocess
import unittest
import os
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, filename = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ['docs']
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "html"], shell=True) # Needed for local test on Windows
# self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
response = subprocess.run(["make", "linkcheck"])
print(response.returncode)
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "linkcheck"], shell=True) # Needed for local test on Windows
# print(response)
# self.assertTrue(response == 0)
os.chdir(wd)
if __name__ == '__main__':
unittest.main()
|
...
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "html"], shell=True) # Needed for local test on Windows
# self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
...
response = subprocess.run(["make", "linkcheck"])
print(response.returncode)
self.assertTrue(response.returncode == 0)
# response = subprocess.call(["make", "linkcheck"], shell=True) # Needed for local test on Windows
# print(response)
# self.assertTrue(response == 0)
os.chdir(wd)
...
|
3990e3aa64cff288def07ee36e24026cc15282c0
|
taiga/projects/issues/serializers.py
|
taiga/projects/issues/serializers.py
|
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
def get_comment(self, obj):
return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
|
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
|
Remove unnecessary field from IssueSerializer
|
Remove unnecessary field from IssueSerializer
|
Python
|
agpl-3.0
|
forging2012/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,seanchen/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,crr0004/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,obimod/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,19kestier/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,WALR/taiga-back,joshisa/taiga-back,astronaut1712/taiga-back,taigaio/taiga-back,coopsource/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,obimod/taiga-back,obimod/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,Tigerwhit4/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,Zaneh-/bearded-tribble-back,dayatz/taiga-back,CoolCloud/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,seanchen/taiga-back,astagi/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,astagi/taiga-back,WALR/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,joshisa/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,crr0004/taiga-back,forging2012/taiga-back,joshisa/taiga-back,CMLL/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,CoolCloud/taiga-back,
gauravjns/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,Tigerwhit4/taiga-back,Tigerwhit4/taiga-back
|
python
|
## Code Before:
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
def get_comment(self, obj):
return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
## Instruction:
Remove unnecessary field from IssueSerializer
## Code After:
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
    """Full serializer for Issue model instances."""

    # Tags are exposed through PickleField -- presumably stored pickled on
    # the model; verify against taiga.base.serializers.PickleField.
    tags = PickleField(required=False)
    # Read-only mirror of the model's is_closed attribute/property.
    is_closed = serializers.Field(source="is_closed")

    class Meta:
        model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
    """Issue serializer that also embeds neighboring issues."""

    def serialize_neighbor(self, neighbor):
        # Hook used by NeighborsSerializerMixin: represent neighbors with the
        # trimmed-down serializer instead of the full IssueSerializer.
        return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
    """Minimal Issue representation used when embedding neighbors."""

    class Meta:
        model = models.Issue
        fields = ("id", "ref", "subject")
        depth = 0  # no nested relations for neighbor entries
|
# ... existing code ...
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
# ... rest of the code ...
|
0a77cedc5dd384cead701b6c9b58d67da4971757
|
fetch.h
|
fetch.h
|
typedef int (*match_func)(const char *fetch_path, const char *state_path);
struct path_matcher {
char *fetch_path;
match_func match_function;
};
struct fetch {
char *fetch_id;
const struct peer *peer;
struct list_head next_fetch;
struct path_matcher matcher[12];
};
cJSON *add_fetch_to_peer(struct peer *p, cJSON *params);
void remove_all_fetchers_from_peer(struct peer *p);
#endif
|
typedef int (*match_func)(const char *fetch_path, const char *state_path);
struct path_matcher {
char *fetch_path;
match_func match_function;
uintptr_t cookie;
};
struct fetch {
char *fetch_id;
const struct peer *peer;
struct list_head next_fetch;
struct path_matcher matcher[12];
};
cJSON *add_fetch_to_peer(struct peer *p, cJSON *params);
void remove_all_fetchers_from_peer(struct peer *p);
#endif
|
Add cookie entry for auxiliary match data.
|
Add cookie entry for auxiliary match data.
|
C
|
mit
|
gatzka/cjet,gatzka/cjet,gatzka/cjet,mloy/cjet,gatzka/cjet,mloy/cjet,mloy/cjet,mloy/cjet,mloy/cjet,gatzka/cjet
|
c
|
## Code Before:
typedef int (*match_func)(const char *fetch_path, const char *state_path);
struct path_matcher {
char *fetch_path;
match_func match_function;
};
struct fetch {
char *fetch_id;
const struct peer *peer;
struct list_head next_fetch;
struct path_matcher matcher[12];
};
cJSON *add_fetch_to_peer(struct peer *p, cJSON *params);
void remove_all_fetchers_from_peer(struct peer *p);
#endif
## Instruction:
Add cookie entry for auxiliary match data.
## Code After:
/* Predicate deciding whether a state path matches a fetch path expression. */
typedef int (*match_func)(const char *fetch_path, const char *state_path);

struct path_matcher {
	char *fetch_path;            /* path expression this matcher was built from */
	match_func match_function;   /* comparison routine for this matcher */
	uintptr_t cookie;            /* auxiliary per-matcher data for match_function */
};

struct fetch {
	char *fetch_id;              /* identifier of this fetch */
	const struct peer *peer;     /* owning peer -- fetches are removed with it */
	struct list_head next_fetch; /* linkage in the peer's fetch list */
	struct path_matcher matcher[12];
};

cJSON *add_fetch_to_peer(struct peer *p, cJSON *params);
void remove_all_fetchers_from_peer(struct peer *p);

#endif
|
// ... existing code ...
struct path_matcher {
char *fetch_path;
match_func match_function;
uintptr_t cookie;
};
struct fetch {
// ... rest of the code ...
|
c5e319363727f332b04ac863e494cb04c52c91b5
|
drupal/Revert.py
|
drupal/Revert.py
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
|
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
|
Python
|
mit
|
codeenigma/deployments,codeenigma/deployments,codeenigma/deployments,codeenigma/deployments
|
python
|
## Code Before:
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
## Instruction:
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
## Code After:
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
    # Roll back the site's database; runs only on the primary app server.
    print "===> Reverting the database..."
    drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
    drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
    # Derive the database name from `drush status` output instead of guessing.
    db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
    common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
    # Point settings.php back at the previous stable build after a failed
    # deploy; runs on every app server.
    print "===> Reverting the settings..."
    with settings(warn_only=True):
        # Resolve the live symlink to find the last stable build's path.
        stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
        # NOTE(review): settings_file is not defined anywhere in this function
        # or visible module scope -- presumably a global set elsewhere; confirm
        # it is in scope before relying on this task.
        if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
            print "===> Could not revert settings.php. Manual intervention required."
        else:
            print "===> Reverted settings.php"
|
...
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
...
|
38d298a81aa8fcd85b16b3879c1665085e5450be
|
exercises/control_flow/prime.py
|
exercises/control_flow/prime.py
|
def is_prime(integer):
"""Determines weather integer is prime, returns a boolean value"""
for i in range(2, integer):
if integer % i == 0:
return False
return True
print("Should be False (0): %r" % is_prime(0))
print("Should be False (1): %r" % is_prime(1))
print("Should be True (2): %r" % is_prime(2))
print("Should be False (8): %r" % is_prime(8))
print("Should be True (17): %r"% is_prime(17))
# Your code below:
|
def is_prime(integer):
"""Determines weather integer is prime, returns a boolean value"""
# add logic here to make sure number < 2 are not prime
for i in range(2, integer):
if integer % i == 0:
return False
return True
print("Should be False (0): %r" % is_prime(0))
print("Should be False (1): %r" % is_prime(1))
print("Should be True (2): %r" % is_prime(2))
print("Should be False (8): %r" % is_prime(8))
print("Should be True (17): %r"% is_prime(17))
# Your code below:
|
Add description where student should add logic
|
Add description where student should add logic
|
Python
|
mit
|
introprogramming/exercises,introprogramming/exercises,introprogramming/exercises
|
python
|
## Code Before:
def is_prime(integer):
"""Determines weather integer is prime, returns a boolean value"""
for i in range(2, integer):
if integer % i == 0:
return False
return True
print("Should be False (0): %r" % is_prime(0))
print("Should be False (1): %r" % is_prime(1))
print("Should be True (2): %r" % is_prime(2))
print("Should be False (8): %r" % is_prime(8))
print("Should be True (17): %r"% is_prime(17))
# Your code below:
## Instruction:
Add description where student should add logic
## Code After:
def is_prime(integer):
    """Determine whether *integer* is prime; return a boolean value."""
    # Numbers below 2 (0, 1 and all negatives) are not prime by definition.
    # Without this guard the trial-division loop below never runs for 0/1
    # and the function wrongly reported them as prime.
    if integer < 2:
        return False
    # Trial division: any divisor in [2, integer) disproves primality.
    for i in range(2, integer):
        if integer % i == 0:
            return False
    return True
# Quick smoke checks -- each label states the expected result.
print("Should be False (0): %r" % is_prime(0))
print("Should be False (1): %r" % is_prime(1))
print("Should be True (2): %r" % is_prime(2))
print("Should be False (8): %r" % is_prime(8))
print("Should be True (17): %r"% is_prime(17))

# Your code below:
|
// ... existing code ...
def is_prime(integer):
"""Determines weather integer is prime, returns a boolean value"""
# add logic here to make sure number < 2 are not prime
for i in range(2, integer):
if integer % i == 0:
return False
// ... rest of the code ...
|
bd2155709c69b2549c9b43ef4271221933411301
|
app/src/unitTests/java/io/github/plastix/forage/util/RxUtilsTest.java
|
app/src/unitTests/java/io/github/plastix/forage/util/RxUtilsTest.java
|
package io.github.plastix.forage.util;
public class RxUtilsTest {
}
|
package io.github.plastix.forage.util;
import org.junit.Test;
import rx.Subscription;
import static com.google.common.truth.Truth.assert_;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class RxUtilsTest {
@Test
public void safeUnsubscribe_handleNullSubscription() {
try {
RxUtils.safeUnsubscribe(null);
} catch (Exception e) {
assert_().fail("RxUtils safeUnsubscribe threw an unexpected error!", e);
}
}
@Test
public void safeUnsubscribe_unsubscribeSubscriptionCorrectly() {
Subscription subscription = mock(Subscription.class);
RxUtils.safeUnsubscribe(subscription);
verify(subscription, times(1)).unsubscribe();
}
@Test
public void safeUnsubscribe_onlyUnsubscribeActiveSubscriptions() {
Subscription subscription = mock(Subscription.class);
when(subscription.isUnsubscribed()).thenReturn(true);
RxUtils.safeUnsubscribe(subscription);
verify(subscription, never()).unsubscribe();
}
}
|
Add unit tests for RxUtils.safeUnsubscribe
|
Add unit tests for RxUtils.safeUnsubscribe
|
Java
|
mpl-2.0
|
Plastix/Forage,Plastix/Forage
|
java
|
## Code Before:
package io.github.plastix.forage.util;
public class RxUtilsTest {
}
## Instruction:
Add unit tests for RxUtils.safeUnsubscribe
## Code After:
package io.github.plastix.forage.util;
import org.junit.Test;
import rx.Subscription;
import static com.google.common.truth.Truth.assert_;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code RxUtils.safeUnsubscribe}.
 */
public class RxUtilsTest {

    @Test
    public void safeUnsubscribe_handleNullSubscription() {
        // A null subscription must be tolerated without throwing.
        try {
            RxUtils.safeUnsubscribe(null);
        } catch (Exception e) {
            assert_().fail("RxUtils safeUnsubscribe threw an unexpected error!", e);
        }
    }

    @Test
    public void safeUnsubscribe_unsubscribeSubscriptionCorrectly() {
        // An active subscription is unsubscribed exactly once.
        Subscription subscription = mock(Subscription.class);

        RxUtils.safeUnsubscribe(subscription);
        verify(subscription, times(1)).unsubscribe();
    }

    @Test
    public void safeUnsubscribe_onlyUnsubscribeActiveSubscriptions() {
        // An already-unsubscribed subscription is left untouched.
        Subscription subscription = mock(Subscription.class);
        when(subscription.isUnsubscribed()).thenReturn(true);

        RxUtils.safeUnsubscribe(subscription);
        verify(subscription, never()).unsubscribe();
    }
}
|
...
package io.github.plastix.forage.util;
import org.junit.Test;
import rx.Subscription;
import static com.google.common.truth.Truth.assert_;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class RxUtilsTest {
@Test
public void safeUnsubscribe_handleNullSubscription() {
try {
RxUtils.safeUnsubscribe(null);
} catch (Exception e) {
assert_().fail("RxUtils safeUnsubscribe threw an unexpected error!", e);
}
}
@Test
public void safeUnsubscribe_unsubscribeSubscriptionCorrectly() {
Subscription subscription = mock(Subscription.class);
RxUtils.safeUnsubscribe(subscription);
verify(subscription, times(1)).unsubscribe();
}
@Test
public void safeUnsubscribe_onlyUnsubscribeActiveSubscriptions() {
Subscription subscription = mock(Subscription.class);
when(subscription.isUnsubscribed()).thenReturn(true);
RxUtils.safeUnsubscribe(subscription);
verify(subscription, never()).unsubscribe();
}
}
...
|
acc7b768ee6f8bb356811839d8d5c0cdcd088cc6
|
setup.py
|
setup.py
|
import json
import os
from setuptools import setup, find_packages
def get_requirements_from_pipfile_lock(pipfile_lock=None):
if pipfile_lock is None:
pipfile_lock = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Pipfile.lock')
lock_data = json.load(open(pipfile_lock))
return [package_name for package_name in lock_data.get('default', {}).keys()]
pipfile_lock_requirements = get_requirements_from_pipfile_lock()
setup(
name='dmpy',
version='0.13.2',
description=open('README.rst').read(),
author='Kiran Garimella and Warren Kretzschmar',
author_email='[email protected]',
packages=find_packages(),
install_requires=pipfile_lock_requirements,
url='https://github.com/kvg/dmpy',
)
|
import json
import os
from setuptools import setup, find_packages
def get_requirements_from_pipfile_lock(pipfile_lock=None):
if pipfile_lock is None:
pipfile_lock = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Pipfile.lock')
lock_data = json.load(open(pipfile_lock))
return [package_name for package_name in lock_data.get('default', {}).keys()]
pipfile_lock_requirements = get_requirements_from_pipfile_lock()
setup(
name='dmpy',
version='0.13.3',
description='Distributed Make for Python',
long_description=open('README.rst').read(),
author='Kiran Garimella and Warren Kretzschmar',
author_email='[email protected]',
packages=find_packages(),
install_requires=pipfile_lock_requirements,
url='https://github.com/kvg/dmpy',
)
|
Fix README.rst in wrong place
|
Fix README.rst in wrong place
|
Python
|
mit
|
kvg/dmpy
|
python
|
## Code Before:
import json
import os
from setuptools import setup, find_packages
def get_requirements_from_pipfile_lock(pipfile_lock=None):
if pipfile_lock is None:
pipfile_lock = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Pipfile.lock')
lock_data = json.load(open(pipfile_lock))
return [package_name for package_name in lock_data.get('default', {}).keys()]
pipfile_lock_requirements = get_requirements_from_pipfile_lock()
setup(
name='dmpy',
version='0.13.2',
description=open('README.rst').read(),
author='Kiran Garimella and Warren Kretzschmar',
author_email='[email protected]',
packages=find_packages(),
install_requires=pipfile_lock_requirements,
url='https://github.com/kvg/dmpy',
)
## Instruction:
Fix README.rst in wrong place
## Code After:
import json
import os
from setuptools import setup, find_packages
def get_requirements_from_pipfile_lock(pipfile_lock=None):
if pipfile_lock is None:
pipfile_lock = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Pipfile.lock')
lock_data = json.load(open(pipfile_lock))
return [package_name for package_name in lock_data.get('default', {}).keys()]
pipfile_lock_requirements = get_requirements_from_pipfile_lock()
setup(
name='dmpy',
version='0.13.3',
description='Distributed Make for Python',
long_description=open('README.rst').read(),
author='Kiran Garimella and Warren Kretzschmar',
author_email='[email protected]',
packages=find_packages(),
install_requires=pipfile_lock_requirements,
url='https://github.com/kvg/dmpy',
)
|
// ... existing code ...
pipfile_lock_requirements = get_requirements_from_pipfile_lock()
setup(
name='dmpy',
version='0.13.3',
description='Distributed Make for Python',
long_description=open('README.rst').read(),
author='Kiran Garimella and Warren Kretzschmar',
author_email='[email protected]',
packages=find_packages(),
// ... rest of the code ...
|
c326becad43949999d151cd1e10fcb75f9d2b148
|
lib/constants.py
|
lib/constants.py
|
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
JSON_PUBSUB_PORT = 15596
|
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
|
Add missing constant for ssl listener.
|
Add missing constant for ssl listener.
|
Python
|
apache-2.0
|
MediaMath/qasino,MediaMath/qasino
|
python
|
## Code Before:
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
JSON_PUBSUB_PORT = 15596
## Instruction:
Add missing constant for ssl listener.
## Code After:
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
|
...
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
...
|
b17244b8aaacaa4bdd06d076f16831ad76b6fb9f
|
src/new/autonomous.c
|
src/new/autonomous.c
|
/* ======================================================================================================
___________________________________________________________________________________________________
| __ __ ________ __ __ __ ______ __ __ _________ ________ |
| \ \ / / | _____| \ \ / / / \ | _ \ \ \ / / |___ ___| | _____| |
| \ \ / / | |_____ \ \_/ / / /\ \ | |_| / \ \_/ / | | | |_____ |
| \ \ / / | _____| ) _ ( / /__\ \ | _ | \ / | | | _____| |
| \ \/ / | |_____ / / \ \ / ______ \ | |_| \ | | | | | |_____ |
| \__/ |________| /_/ \_\ /_/ \_\ |______/ |_| |_| |________| |
|___________________________________________________________________________________________________|
====================================================================================================== */
void pre_auton()
{
}
task autonomous()
{
}
|
/* ======================================================================================================
___________________________________________________________________________________________________
| __ __ ________ __ __ __ ______ __ __ _________ ________ |
| \ \ / / | _____| \ \ / / / \ | _ \ \ \ / / |___ ___| | _____| |
| \ \ / / | |_____ \ \_/ / / /\ \ | |_| / \ \_/ / | | | |_____ |
| \ \ / / | _____| ) _ ( / /__\ \ | _ | \ / | | | _____| |
| \ \/ / | |_____ / / \ \ / ______ \ | |_| \ | | | | | |_____ |
| \__/ |________| /_/ \_\ /_/ \_\ |______/ |_| |_| |________| |
|___________________________________________________________________________________________________|
====================================================================================================== */
static void a_base_encoders_reset(void);
void pre_auton()
{
a_base_encoders_reset();
}
task autonomous()
{
}
static void a_base_encoders_reset()
{
resetMotorEncoder(mBaseFL);
resetMotorEncoder(mBaseFR);
resetMotorEncoder(mBaseBL);
resetMotorEncoder(mBaseBR);
}
|
Reset motor encoders in pre_auton
|
Reset motor encoders in pre_auton
|
C
|
mit
|
qsctr/vex-4194b-2016
|
c
|
## Code Before:
/* ======================================================================================================
___________________________________________________________________________________________________
| __ __ ________ __ __ __ ______ __ __ _________ ________ |
| \ \ / / | _____| \ \ / / / \ | _ \ \ \ / / |___ ___| | _____| |
| \ \ / / | |_____ \ \_/ / / /\ \ | |_| / \ \_/ / | | | |_____ |
| \ \ / / | _____| ) _ ( / /__\ \ | _ | \ / | | | _____| |
| \ \/ / | |_____ / / \ \ / ______ \ | |_| \ | | | | | |_____ |
| \__/ |________| /_/ \_\ /_/ \_\ |______/ |_| |_| |________| |
|___________________________________________________________________________________________________|
====================================================================================================== */
void pre_auton()
{
}
task autonomous()
{
}
## Instruction:
Reset motor encoders in pre_auton
## Code After:
/* ======================================================================================================
___________________________________________________________________________________________________
| __ __ ________ __ __ __ ______ __ __ _________ ________ |
| \ \ / / | _____| \ \ / / / \ | _ \ \ \ / / |___ ___| | _____| |
| \ \ / / | |_____ \ \_/ / / /\ \ | |_| / \ \_/ / | | | |_____ |
| \ \ / / | _____| ) _ ( / /__\ \ | _ | \ / | | | _____| |
| \ \/ / | |_____ / / \ \ / ______ \ | |_| \ | | | | | |_____ |
| \__/ |________| /_/ \_\ /_/ \_\ |______/ |_| |_| |________| |
|___________________________________________________________________________________________________|
====================================================================================================== */
static void a_base_encoders_reset(void);
void pre_auton()
{
a_base_encoders_reset();
}
task autonomous()
{
}
static void a_base_encoders_reset()
{
resetMotorEncoder(mBaseFL);
resetMotorEncoder(mBaseFR);
resetMotorEncoder(mBaseBL);
resetMotorEncoder(mBaseBR);
}
|
# ... existing code ...
====================================================================================================== */
static void a_base_encoders_reset(void);
void pre_auton()
{
a_base_encoders_reset();
}
task autonomous()
# ... modified code ...
{
}
static void a_base_encoders_reset()
{
resetMotorEncoder(mBaseFL);
resetMotorEncoder(mBaseFR);
resetMotorEncoder(mBaseBL);
resetMotorEncoder(mBaseBR);
}
# ... rest of the code ...
|
51c2b7d163ed5e7099ef948231c5236a55b894f9
|
src/test/java/com/tyleryates/util/WordTest.java
|
src/test/java/com/tyleryates/util/WordTest.java
|
package com.tyleryates.util;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("JavaDoc")
public class WordTest {
private Word word;
@Before
public void setUp() throws Exception {
word = new Word("lottery");
}
@Test
public void testCanMake() throws Exception {
assertTrue(word.canMake(word));
assertTrue(word.canMake("lot"));
assertTrue(word.canMake("try"));
assertTrue(word.canMake("let"));
assertTrue(word.canMake(""));
assertFalse(word.canMake("lotteries"));
assertFalse(word.canMake("caring"));
}
}
|
package com.tyleryates.util;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("JavaDoc")
public class WordTest {
@Rule
public final ExpectedException exception = ExpectedException.none();
private Word word;
private static final String STRING = "lottery";
@Before
public void setUp() throws Exception {
word = new Word(STRING);
}
@Test
public void testWordConstructor() {
exception.expect(IllegalArgumentException.class);
new Word(null);
}
@Test
public void testGetWord() {
assertEquals(STRING, word.getString());
assertEquals("", new Word("").getString());
}
@Test
public void testCanMake() throws Exception {
assertTrue(word.canMake(word));
assertTrue(word.canMake("lot"));
assertTrue(word.canMake("try"));
assertTrue(word.canMake("let"));
assertTrue(word.canMake(""));
assertFalse(word.canMake("lotteries"));
assertFalse(word.canMake("caring"));
}
}
|
Add tests for Word class.
|
Add tests for Word class.
|
Java
|
mit
|
Tyler-Yates/Myrtle
|
java
|
## Code Before:
package com.tyleryates.util;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("JavaDoc")
public class WordTest {
private Word word;
@Before
public void setUp() throws Exception {
word = new Word("lottery");
}
@Test
public void testCanMake() throws Exception {
assertTrue(word.canMake(word));
assertTrue(word.canMake("lot"));
assertTrue(word.canMake("try"));
assertTrue(word.canMake("let"));
assertTrue(word.canMake(""));
assertFalse(word.canMake("lotteries"));
assertFalse(word.canMake("caring"));
}
}
## Instruction:
Add tests for Word class.
## Code After:
package com.tyleryates.util;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("JavaDoc")
public class WordTest {
@Rule
public final ExpectedException exception = ExpectedException.none();
private Word word;
private static final String STRING = "lottery";
@Before
public void setUp() throws Exception {
word = new Word(STRING);
}
@Test
public void testWordConstructor() {
exception.expect(IllegalArgumentException.class);
new Word(null);
}
@Test
public void testGetWord() {
assertEquals(STRING, word.getString());
assertEquals("", new Word("").getString());
}
@Test
public void testCanMake() throws Exception {
assertTrue(word.canMake(word));
assertTrue(word.canMake("lot"));
assertTrue(word.canMake("try"));
assertTrue(word.canMake("let"));
assertTrue(word.canMake(""));
assertFalse(word.canMake("lotteries"));
assertFalse(word.canMake("caring"));
}
}
|
# ... existing code ...
package com.tyleryates.util;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("JavaDoc")
public class WordTest {
@Rule
public final ExpectedException exception = ExpectedException.none();
private Word word;
private static final String STRING = "lottery";
@Before
public void setUp() throws Exception {
word = new Word(STRING);
}
@Test
public void testWordConstructor() {
exception.expect(IllegalArgumentException.class);
new Word(null);
}
@Test
public void testGetWord() {
assertEquals(STRING, word.getString());
assertEquals("", new Word("").getString());
}
@Test
# ... rest of the code ...
|
ad757857b7878904c6d842e115074c4fac24bed7
|
tweetar.py
|
tweetar.py
|
import twitter
import urllib2
NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT"
def retrieve_and_post(conf):
post = False
pull_url = NOAA_URL.replace('*station_id*', conf['station'])
request = urllib2.Request(pull_url, None)
response = urllib2.urlopen(request)
metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response
if getattr(conf, 'hashtag', False):
metar = '%s #%s' % (metar, conf['hashtag'])
api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password'])
# get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway
past_statuses = api.GetUserTimeline(conf['twitter_user'])
if past_statuses[-0].text != metar:
post = True
if post:
api.PostUpdate(metar)
if __name__ == '__main__':
retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
|
import twitter
import urllib2
NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT"
def retrieve_and_post(conf):
post = False
pull_url = NOAA_URL.replace('*station_id*', conf['station'])
request = urllib2.Request(pull_url, None)
response = urllib2.urlopen(request)
metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response
if conf.get('hashtag', False):
metar = '%s #%s' % (metar, conf['hashtag'])
api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password'])
# get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway
past_statuses = api.GetUserTimeline(conf['twitter_user'])
if past_statuses[-0].text != metar:
post = True
if post:
api.PostUpdate(metar)
if __name__ == '__main__':
retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
|
Use .get instead of getattr, dummy.
|
Use .get instead of getattr, dummy.
|
Python
|
bsd-3-clause
|
adamfast/python-tweetar
|
python
|
## Code Before:
import twitter
import urllib2
NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT"
def retrieve_and_post(conf):
post = False
pull_url = NOAA_URL.replace('*station_id*', conf['station'])
request = urllib2.Request(pull_url, None)
response = urllib2.urlopen(request)
metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response
if getattr(conf, 'hashtag', False):
metar = '%s #%s' % (metar, conf['hashtag'])
api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password'])
# get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway
past_statuses = api.GetUserTimeline(conf['twitter_user'])
if past_statuses[-0].text != metar:
post = True
if post:
api.PostUpdate(metar)
if __name__ == '__main__':
retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
## Instruction:
Use .get instead of getattr, dummy.
## Code After:
import twitter
import urllib2
NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT"
def retrieve_and_post(conf):
post = False
pull_url = NOAA_URL.replace('*station_id*', conf['station'])
request = urllib2.Request(pull_url, None)
response = urllib2.urlopen(request)
metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response
if conf.get('hashtag', False):
metar = '%s #%s' % (metar, conf['hashtag'])
api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password'])
# get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway
past_statuses = api.GetUserTimeline(conf['twitter_user'])
if past_statuses[-0].text != metar:
post = True
if post:
api.PostUpdate(metar)
if __name__ == '__main__':
retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
|
# ... existing code ...
response = urllib2.urlopen(request)
metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response
if conf.get('hashtag', False):
metar = '%s #%s' % (metar, conf['hashtag'])
api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password'])
# ... rest of the code ...
|
d5b231fbc5dd32ded78e4499a49872487533cda4
|
tests/test_main.py
|
tests/test_main.py
|
from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('[email protected]:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
appveyor_temp_dir = (
'c:\\users\\appveyor\\appdata\\local\\temp\\1\\pytest-0\\'
'test_default_output_dir0\\template'
)
assert is_repo_url(appveyor_temp_dir) is False
|
from cookiecutter.main import is_repo_url, expand_abbreviations
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('[email protected]:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
appveyor_temp_dir = (
'c:\\users\\appveyor\\appdata\\local\\temp\\1\\pytest-0\\'
'test_default_output_dir0\\template'
)
assert is_repo_url(appveyor_temp_dir) is False
def test_expand_abbreviations():
template = 'gh:audreyr/cookiecutter-pypackage'
# This is not a valid repo url just yet!
# First `main.expand_abbreviations` needs to translate it
assert is_repo_url(template) is False
expanded_template = expand_abbreviations(template, {})
assert is_repo_url(expanded_template) is True
|
Implement a test specifically for abbreviations
|
Implement a test specifically for abbreviations
|
Python
|
bsd-3-clause
|
willingc/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,michaeljoseph/cookiecutter,pjbull/cookiecutter,cguardia/cookiecutter,terryjbates/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter,dajose/cookiecutter,Springerle/cookiecutter,terryjbates/cookiecutter,cguardia/cookiecutter,willingc/cookiecutter,ramiroluz/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,hackebrot/cookiecutter,dajose/cookiecutter
|
python
|
## Code Before:
from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('[email protected]:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
appveyor_temp_dir = (
'c:\\users\\appveyor\\appdata\\local\\temp\\1\\pytest-0\\'
'test_default_output_dir0\\template'
)
assert is_repo_url(appveyor_temp_dir) is False
## Instruction:
Implement a test specifically for abbreviations
## Code After:
from cookiecutter.main import is_repo_url, expand_abbreviations
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('[email protected]:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
appveyor_temp_dir = (
'c:\\users\\appveyor\\appdata\\local\\temp\\1\\pytest-0\\'
'test_default_output_dir0\\template'
)
assert is_repo_url(appveyor_temp_dir) is False
def test_expand_abbreviations():
template = 'gh:audreyr/cookiecutter-pypackage'
# This is not a valid repo url just yet!
# First `main.expand_abbreviations` needs to translate it
assert is_repo_url(template) is False
expanded_template = expand_abbreviations(template, {})
assert is_repo_url(expanded_template) is True
|
# ... existing code ...
from cookiecutter.main import is_repo_url, expand_abbreviations
def test_is_repo_url():
# ... modified code ...
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('[email protected]:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
...
'test_default_output_dir0\\template'
)
assert is_repo_url(appveyor_temp_dir) is False
def test_expand_abbreviations():
template = 'gh:audreyr/cookiecutter-pypackage'
# This is not a valid repo url just yet!
# First `main.expand_abbreviations` needs to translate it
assert is_repo_url(template) is False
expanded_template = expand_abbreviations(template, {})
assert is_repo_url(expanded_template) is True
# ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.