commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
93da664c36b47e478b7f52e1510a24d73f4f8d1d
|
runtime/src/chplexit.c
|
runtime/src/chplexit.c
|
static void chpl_exit_common(int status, int all) {
fflush(stdout);
fflush(stderr);
if (status != 0) {
gdbShouldBreakHere();
}
if (all) {
chpl_comm_barrier("chpl_comm_exit_all");
chpl_comm_stopPollingTask();
chpl_task_exit();
chpl_reportMemInfo();
chpl_mem_exit();
chpl_comm_exit_all(status);
} else {
chpl_mem_exit();
chpl_comm_exit_any(status);
}
exit(status);
}
void chpl_exit_all(int status) {
chpl_exit_common(status, 1);
}
void chpl_exit_any(int status) {
chpl_exit_common(status, 0);
}
|
static void chpl_exit_common(int status, int all) {
fflush(stdout);
fflush(stderr);
if (status != 0) {
gdbShouldBreakHere();
}
if (all) {
chpl_comm_barrier("chpl_exit_common");
chpl_comm_stopPollingTask();
chpl_task_exit();
chpl_reportMemInfo();
chpl_mem_exit();
chpl_comm_exit_all(status);
} else {
chpl_mem_exit();
chpl_comm_exit_any(status);
}
exit(status);
}
void chpl_exit_all(int status) {
chpl_exit_common(status, 1);
}
void chpl_exit_any(int status) {
chpl_exit_common(status, 0);
}
|
Clarify the debug message that may be generated by the chpl_comm_barrier() call in chpl_exit_common().
|
Clarify the debug message that may be generated by the
chpl_comm_barrier() call in chpl_exit_common().
git-svn-id: 88467cb1fb04b8a755be7e1ee1026be4190196ef@19217 3a8e244f-b0f2-452b-bcba-4c88e055c3ca
|
C
|
apache-2.0
|
chizarlicious/chapel,hildeth/chapel,chizarlicious/chapel,sungeunchoi/chapel,sungeunchoi/chapel,hildeth/chapel,CoryMcCartan/chapel,sungeunchoi/chapel,chizarlicious/chapel,CoryMcCartan/chapel,hildeth/chapel,hildeth/chapel,chizarlicious/chapel,chizarlicious/chapel,CoryMcCartan/chapel,sungeunchoi/chapel,sungeunchoi/chapel,sungeunchoi/chapel,chizarlicious/chapel,CoryMcCartan/chapel,hildeth/chapel,CoryMcCartan/chapel,hildeth/chapel,CoryMcCartan/chapel,sungeunchoi/chapel,chizarlicious/chapel,CoryMcCartan/chapel,hildeth/chapel,sungeunchoi/chapel
|
c
|
## Code Before:
static void chpl_exit_common(int status, int all) {
fflush(stdout);
fflush(stderr);
if (status != 0) {
gdbShouldBreakHere();
}
if (all) {
chpl_comm_barrier("chpl_comm_exit_all");
chpl_comm_stopPollingTask();
chpl_task_exit();
chpl_reportMemInfo();
chpl_mem_exit();
chpl_comm_exit_all(status);
} else {
chpl_mem_exit();
chpl_comm_exit_any(status);
}
exit(status);
}
void chpl_exit_all(int status) {
chpl_exit_common(status, 1);
}
void chpl_exit_any(int status) {
chpl_exit_common(status, 0);
}
## Instruction:
Clarify the debug message that may be generated by the
chpl_comm_barrier() call in chpl_exit_common().
git-svn-id: 88467cb1fb04b8a755be7e1ee1026be4190196ef@19217 3a8e244f-b0f2-452b-bcba-4c88e055c3ca
## Code After:
static void chpl_exit_common(int status, int all) {
fflush(stdout);
fflush(stderr);
if (status != 0) {
gdbShouldBreakHere();
}
if (all) {
chpl_comm_barrier("chpl_exit_common");
chpl_comm_stopPollingTask();
chpl_task_exit();
chpl_reportMemInfo();
chpl_mem_exit();
chpl_comm_exit_all(status);
} else {
chpl_mem_exit();
chpl_comm_exit_any(status);
}
exit(status);
}
void chpl_exit_all(int status) {
chpl_exit_common(status, 1);
}
void chpl_exit_any(int status) {
chpl_exit_common(status, 0);
}
|
// ... existing code ...
gdbShouldBreakHere();
}
if (all) {
chpl_comm_barrier("chpl_exit_common");
chpl_comm_stopPollingTask();
chpl_task_exit();
chpl_reportMemInfo();
// ... rest of the code ...
|
a5f2df3a540ac99dea73bc7d1d3c29f70fb13c60
|
tympeg/streamsaver.py
|
tympeg/streamsaver.py
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
Clean up imports after expirements with signals for quitting
|
Clean up imports after expirements with signals for quitting
|
Python
|
mit
|
taishengy/tympeg
|
python
|
## Code Before:
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
## Instruction:
Clean up imports after expirements with signals for quitting
## Code After:
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
...
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
...
|
183bd0005b71a587021c21b095961a0760e12f23
|
swampdragon/file_upload_handler.py
|
swampdragon/file_upload_handler.py
|
from os.path import join
from os import makedirs
from django.conf import settings
def make_file_id(file_data):
return str(abs(hash(file_data)))
def get_file_location(file_name, file_id):
path = join(settings.MEDIA_ROOT, 'tmp')
path = join(path, str(file_id))
try:
makedirs(path)
except:
pass
return join(path, file_name)
def get_file_url(file_name, file_id):
path = join(settings.MEDIA_URL, 'tmp')
path = join(path, str(file_id))
return join(path, file_name)
|
from os.path import join
from os import makedirs
from django.conf import settings
from datetime import datetime
import time
def make_file_id(file_data):
timestamp = datetime.now()
timestamp = time.mktime(timestamp.timetuple()) * 1e3 + timestamp.microsecond / 1e3
timestamp = '{}'.format(timestamp).encode()
return str(abs(hash(file_data + timestamp)))
def get_file_location(file_name, file_id):
path = join(settings.MEDIA_ROOT, 'tmp')
path = join(path, str(file_id))
try:
makedirs(path)
except:
pass
return join(path, file_name)
def get_file_url(file_name, file_id):
path = join(settings.MEDIA_URL, 'tmp')
path = join(path, str(file_id))
return join(path, file_name)
|
Improve the file id hash
|
Improve the file id hash
|
Python
|
bsd-3-clause
|
sahlinet/swampdragon,denizs/swampdragon,sahlinet/swampdragon,aexeagmbh/swampdragon,bastianh/swampdragon,faulkner/swampdragon,faulkner/swampdragon,bastianh/swampdragon,boris-savic/swampdragon,d9pouces/swampdragon,seclinch/swampdragon,aexeagmbh/swampdragon,Manuel4131/swampdragon,seclinch/swampdragon,Manuel4131/swampdragon,michael-k/swampdragon,h-hirokawa/swampdragon,bastianh/swampdragon,jonashagstedt/swampdragon,Manuel4131/swampdragon,denizs/swampdragon,h-hirokawa/swampdragon,seclinch/swampdragon,d9pouces/swampdragon,michael-k/swampdragon,jonashagstedt/swampdragon,faulkner/swampdragon,boris-savic/swampdragon,michael-k/swampdragon,denizs/swampdragon,sahlinet/swampdragon,aexeagmbh/swampdragon,d9pouces/swampdragon,jonashagstedt/swampdragon,boris-savic/swampdragon
|
python
|
## Code Before:
from os.path import join
from os import makedirs
from django.conf import settings
def make_file_id(file_data):
return str(abs(hash(file_data)))
def get_file_location(file_name, file_id):
path = join(settings.MEDIA_ROOT, 'tmp')
path = join(path, str(file_id))
try:
makedirs(path)
except:
pass
return join(path, file_name)
def get_file_url(file_name, file_id):
path = join(settings.MEDIA_URL, 'tmp')
path = join(path, str(file_id))
return join(path, file_name)
## Instruction:
Improve the file id hash
## Code After:
from os.path import join
from os import makedirs
from django.conf import settings
from datetime import datetime
import time
def make_file_id(file_data):
timestamp = datetime.now()
timestamp = time.mktime(timestamp.timetuple()) * 1e3 + timestamp.microsecond / 1e3
timestamp = '{}'.format(timestamp).encode()
return str(abs(hash(file_data + timestamp)))
def get_file_location(file_name, file_id):
path = join(settings.MEDIA_ROOT, 'tmp')
path = join(path, str(file_id))
try:
makedirs(path)
except:
pass
return join(path, file_name)
def get_file_url(file_name, file_id):
path = join(settings.MEDIA_URL, 'tmp')
path = join(path, str(file_id))
return join(path, file_name)
|
# ... existing code ...
from os.path import join
from os import makedirs
from django.conf import settings
from datetime import datetime
import time
def make_file_id(file_data):
timestamp = datetime.now()
timestamp = time.mktime(timestamp.timetuple()) * 1e3 + timestamp.microsecond / 1e3
timestamp = '{}'.format(timestamp).encode()
return str(abs(hash(file_data + timestamp)))
def get_file_location(file_name, file_id):
# ... rest of the code ...
|
6cf9a022850cef96ca409dba26fb6820a35f8eeb
|
server-coreless/src/main/java/org/openqa/selenium/server/BrowserConfigurationOptions.java
|
server-coreless/src/main/java/org/openqa/selenium/server/BrowserConfigurationOptions.java
|
package org.openqa.selenium.server;
public class BrowserConfigurationOptions {
private String profile = "";
public BrowserConfigurationOptions(String browserConfiguration) {
//"name:value;name:value"
String[] optionsPairList = browserConfiguration.split(";");
for (int i = 0; i < optionsPairList.length; i++) {
String[] option = optionsPairList[i].split(":", 2);
String optionsName = option[0].trim();
String optionValue = option[1].trim();
if ("profile".equalsIgnoreCase(optionsName)) {
this.profile = optionValue;
}
}
}
public BrowserConfigurationOptions() {}
/**
* Sets the profile name for this configuration object.
* @param profile_name The name of the profile to use
*/
public void setProfile(String profile_name) {
this.profile = profile_name;
}
public String serialize() {
//"profile:XXXXXXXXXX"
return String.format("profile:%s", profile);
}
public String getProfile() {
return profile;
}
}
|
package org.openqa.selenium.server;
public class BrowserConfigurationOptions {
private String profile = "";
public BrowserConfigurationOptions(String browserConfiguration) {
//"name:value;name:value"
String[] optionsPairList = browserConfiguration.split(";");
for (int i = 0; i < optionsPairList.length; i++) {
String[] option = optionsPairList[i].split(":", 2);
if (2 == option.length) {
String optionsName = option[0].trim();
String optionValue = option[1].trim();
if ("profile".equalsIgnoreCase(optionsName)) {
this.profile = optionValue;
}
}
}
}
public BrowserConfigurationOptions() {}
/**
* Sets the profile name for this configuration object.
* @param profile_name The name of the profile to use
*/
public void setProfile(String profile_name) {
this.profile = profile_name;
}
public String serialize() {
//"profile:XXXXXXXXXX"
return String.format("profile:%s", profile);
}
public String getProfile() {
return profile;
}
}
|
Fix index out of bounds exception by first checking to make sure the configuration option parsed correctly.
|
Fix index out of bounds exception by first checking to make sure the configuration option parsed correctly.
r5374
|
Java
|
apache-2.0
|
xsyntrex/selenium,stupidnetizen/selenium,dkentw/selenium,MCGallaspy/selenium,SevInf/IEDriver,vinay-qa/vinayit-android-server-apk,jsarenik/jajomojo-selenium,temyers/selenium,rovner/selenium,vveliev/selenium,anshumanchatterji/selenium,gregerrag/selenium,carsonmcdonald/selenium,sankha93/selenium,petruc/selenium,jerome-jacob/selenium,sevaseva/selenium,bayandin/selenium,TheBlackTuxCorp/selenium,gotcha/selenium,jsarenik/jajomojo-selenium,krosenvold/selenium,DrMarcII/selenium,alexec/selenium,markodolancic/selenium,alb-i986/selenium,arunsingh/selenium,jknguyen/josephknguyen-selenium,wambat/selenium,aluedeke/chromedriver,blackboarddd/selenium,asashour/selenium,onedox/selenium,dcjohnson1989/selenium,HtmlUnit/selenium,gemini-testing/selenium,aluedeke/chromedriver,MCGallaspy/selenium,SeleniumHQ/selenium,carsonmcdonald/selenium,Herst/selenium,alexec/selenium,krosenvold/selenium,customcommander/selenium,gemini-testing/selenium,chrisblock/selenium,sankha93/selenium,5hawnknight/selenium,chrsmithdemos/selenium,chrsmithdemos/selenium,Appdynamics/selenium,livioc/selenium,orange-tv-blagnac/selenium,eric-stanley/selenium,Jarob22/selenium,Sravyaksr/selenium,i17c/selenium,vinay-qa/vinayit-android-server-apk,bartolkaruza/selenium,gotcha/selenium,HtmlUnit/selenium,Appdynamics/selenium,tarlabs/selenium,bayandin/selenium,lummyare/lummyare-test,GorK-ChO/selenium,jabbrwcky/selenium,amar-sharma/selenium,markodolancic/selenium,vinay-qa/vinayit-android-server-apk,carlosroh/selenium,joshuaduffy/selenium,gorlemik/selenium,lmtierney/selenium,SevInf/IEDriver,isaksky/selenium,manuelpirez/selenium,titusfortner/selenium,knorrium/selenium,GorK-ChO/selenium,jsakamoto/selenium,denis-vilyuzhanin/selenium-fastview,doungni/selenium,bmannix/selenium,SeleniumHQ/selenium,MCGallaspy/selenium,houchj/selenium,krosenvold/selenium,rovner/selenium,tbeadle/selenium,TikhomirovSergey/selenium,mach6/selenium,skurochkin/selenium,customcommander/selenium,twalpole/selenium,Appdynamics/selenium,vveliev/selenium,misttechnologie
s/selenium,alb-i986/selenium,slongwang/selenium,compstak/selenium,compstak/selenium,temyers/selenium,twalpole/selenium,vveliev/selenium,joshmgrant/selenium,lummyare/lummyare-test,dimacus/selenium,JosephCastro/selenium,Ardesco/selenium,jerome-jacob/selenium,clavery/selenium,yukaReal/selenium,jsakamoto/selenium,soundcloud/selenium,titusfortner/selenium,dimacus/selenium,AutomatedTester/selenium,SouWilliams/selenium,AutomatedTester/selenium,valfirst/selenium,clavery/selenium,slongwang/selenium,gotcha/selenium,bayandin/selenium,SeleniumHQ/selenium,orange-tv-blagnac/selenium,lrowe/selenium,knorrium/selenium,jabbrwcky/selenium,dkentw/selenium,markodolancic/selenium,vinay-qa/vinayit-android-server-apk,jsarenik/jajomojo-selenium,denis-vilyuzhanin/selenium-fastview,krosenvold/selenium-git-release-candidate,valfirst/selenium,gemini-testing/selenium,davehunt/selenium,SouWilliams/selenium,joshmgrant/selenium,jknguyen/josephknguyen-selenium,zenefits/selenium,kalyanjvn1/selenium,SouWilliams/selenium,krosenvold/selenium-git-release-candidate,kalyanjvn1/selenium,xsyntrex/selenium,AutomatedTester/selenium,minhthuanit/selenium,isaksky/selenium,o-schneider/selenium,actmd/selenium,gregerrag/selenium,gabrielsimas/selenium,dibagga/selenium,mojwang/selenium,meksh/selenium,onedox/selenium,GorK-ChO/selenium,DrMarcII/selenium,manuelpirez/selenium,gabrielsimas/selenium,isaksky/selenium,meksh/selenium,twalpole/selenium,mach6/selenium,meksh/selenium,mestihudson/selenium,manuelpirez/selenium,manuelpirez/selenium,Dude-X/selenium,houchj/selenium,soundcloud/selenium,asashour/selenium,carsonmcdonald/selenium,amar-sharma/selenium,p0deje/selenium,aluedeke/chromedriver,customcommander/selenium,blueyed/selenium,mach6/selenium,orange-tv-blagnac/selenium,s2oBCN/selenium,joshuaduffy/selenium,freynaud/selenium,petruc/selenium,Herst/selenium,gurayinan/selenium,amar-sharma/selenium,asolntsev/selenium,onedox/selenium,Sravyaksr/selenium,valfirst/selenium,Tom-Trumper/selenium,mestihudson/selenium,titusfortner/sel
enium,o-schneider/selenium,dandv/selenium,MCGallaspy/selenium,dimacus/selenium,SevInf/IEDriver,lummyare/lummyare-test,telefonicaid/selenium,sankha93/selenium,valfirst/selenium,krmahadevan/selenium,vveliev/selenium,SevInf/IEDriver,Ardesco/selenium,gotcha/selenium,valfirst/selenium,soundcloud/selenium,TikhomirovSergey/selenium,chrisblock/selenium,bmannix/selenium,zenefits/selenium,gurayinan/selenium,Sravyaksr/selenium,eric-stanley/selenium,juangj/selenium,dbo/selenium,lilredindy/selenium,stupidnetizen/selenium,bartolkaruza/selenium,orange-tv-blagnac/selenium,dibagga/selenium,dandv/selenium,Herst/selenium,markodolancic/selenium,p0deje/selenium,sevaseva/selenium,joshbruning/selenium,krosenvold/selenium-git-release-candidate,meksh/selenium,chrisblock/selenium,gabrielsimas/selenium,amikey/selenium,sri85/selenium,Dude-X/selenium,manuelpirez/selenium,joshbruning/selenium,asashour/selenium,krosenvold/selenium-git-release-candidate,misttechnologies/selenium,rplevka/selenium,asolntsev/selenium,denis-vilyuzhanin/selenium-fastview,joshmgrant/selenium,JosephCastro/selenium,tarlabs/selenium,bartolkaruza/selenium,dibagga/selenium,lilredindy/selenium,5hawnknight/selenium,alb-i986/selenium,orange-tv-blagnac/selenium,telefonicaid/selenium,s2oBCN/selenium,s2oBCN/selenium,asolntsev/selenium,o-schneider/selenium,MeetMe/selenium,davehunt/selenium,denis-vilyuzhanin/selenium-fastview,sri85/selenium,dbo/selenium,MCGallaspy/selenium,petruc/selenium,BlackSmith/selenium,carlosroh/selenium,temyers/selenium,pulkitsinghal/selenium,onedox/selenium,soundcloud/selenium,bayandin/selenium,uchida/selenium,Jarob22/selenium,blackboarddd/selenium,aluedeke/chromedriver,gabrielsimas/selenium,Sravyaksr/selenium,sebady/selenium,krosenvold/selenium,mach6/selenium,manuelpirez/selenium,dcjohnson1989/selenium,Appdynamics/selenium,jsarenik/jajomojo-selenium,jabbrwcky/selenium,soundcloud/selenium,sevaseva/selenium,denis-vilyuzhanin/selenium-fastview,dibagga/selenium,valfirst/selenium,rrussell39/selenium,gabrielsimas
/selenium,wambat/selenium,tarlabs/selenium,asolntsev/selenium,mestihudson/selenium,lukeis/selenium,dbo/selenium,GorK-ChO/selenium,temyers/selenium,mestihudson/selenium,carsonmcdonald/selenium,TheBlackTuxCorp/selenium,freynaud/selenium,joshuaduffy/selenium,manuelpirez/selenium,gurayinan/selenium,tarlabs/selenium,chrisblock/selenium,DrMarcII/selenium,tbeadle/selenium,chrisblock/selenium,i17c/selenium,xmhubj/selenium,TheBlackTuxCorp/selenium,sankha93/selenium,lummyare/lummyare-lummy,lmtierney/selenium,oddui/selenium,rplevka/selenium,lummyare/lummyare-lummy,gregerrag/selenium,Jarob22/selenium,GorK-ChO/selenium,jsakamoto/selenium,blueyed/selenium,Ardesco/selenium,anshumanchatterji/selenium,i17c/selenium,MeetMe/selenium,krosenvold/selenium,i17c/selenium,p0deje/selenium,mach6/selenium,oddui/selenium,SeleniumHQ/selenium,mojwang/selenium,soundcloud/selenium,actmd/selenium,krmahadevan/selenium,tbeadle/selenium,bartolkaruza/selenium,amikey/selenium,tkurnosova/selenium,sri85/selenium,quoideneuf/selenium,skurochkin/selenium,MeetMe/selenium,uchida/selenium,MCGallaspy/selenium,asolntsev/selenium,lilredindy/selenium,DrMarcII/selenium,sri85/selenium,dbo/selenium,joshmgrant/selenium,gabrielsimas/selenium,RamaraoDonta/ramarao-clone,mojwang/selenium,stupidnetizen/selenium,blackboarddd/selenium,dibagga/selenium,slongwang/selenium,pulkitsinghal/selenium,isaksky/selenium,rovner/selenium,alb-i986/selenium,meksh/selenium,houchj/selenium,rovner/selenium,p0deje/selenium,misttechnologies/selenium,carlosroh/selenium,MeetMe/selenium,TikhomirovSergey/selenium,dkentw/selenium,lilredindy/selenium,davehunt/selenium,gregerrag/selenium,houchj/selenium,chrsmithdemos/selenium,freynaud/selenium,TikhomirovSergey/selenium,freynaud/selenium,lrowe/selenium,rovner/selenium,TikhomirovSergey/selenium,jsakamoto/selenium,Tom-Trumper/selenium,dkentw/selenium,knorrium/selenium,lmtierney/selenium,Dude-X/selenium,gotcha/selenium,slongwang/selenium,rplevka/selenium,amar-sharma/selenium,sag-enorman/selenium,Dude-X/sele
nium,lummyare/lummyare-lummy,manuelpirez/selenium,lukeis/selenium,SeleniumHQ/selenium,carsonmcdonald/selenium,gorlemik/selenium,Dude-X/selenium,Ardesco/selenium,customcommander/selenium,vveliev/selenium,bayandin/selenium,davehunt/selenium,davehunt/selenium,twalpole/selenium,pulkitsinghal/selenium,xsyntrex/selenium,thanhpete/selenium,dimacus/selenium,HtmlUnit/selenium,zenefits/selenium,chrsmithdemos/selenium,HtmlUnit/selenium,s2oBCN/selenium,Jarob22/selenium,joshbruning/selenium,eric-stanley/selenium,oddui/selenium,titusfortner/selenium,DrMarcII/selenium,JosephCastro/selenium,lukeis/selenium,markodolancic/selenium,rplevka/selenium,dcjohnson1989/selenium,tkurnosova/selenium,Appdynamics/selenium,5hawnknight/selenium,jsarenik/jajomojo-selenium,clavery/selenium,HtmlUnit/selenium,compstak/selenium,jsakamoto/selenium,sebady/selenium,dandv/selenium,AutomatedTester/selenium,jsarenik/jajomojo-selenium,orange-tv-blagnac/selenium,arunsingh/selenium,petruc/selenium,rrussell39/selenium,anshumanchatterji/selenium,asashour/selenium,Appdynamics/selenium,HtmlUnit/selenium,sri85/selenium,quoideneuf/selenium,Jarob22/selenium,lmtierney/selenium,oddui/selenium,alexec/selenium,arunsingh/selenium,bayandin/selenium,o-schneider/selenium,gurayinan/selenium,skurochkin/selenium,5hawnknight/selenium,titusfortner/selenium,SeleniumHQ/selenium,actmd/selenium,kalyanjvn1/selenium,HtmlUnit/selenium,lukeis/selenium,arunsingh/selenium,arunsingh/selenium,gabrielsimas/selenium,actmd/selenium,DrMarcII/selenium,BlackSmith/selenium,yukaReal/selenium,joshbruning/selenium,xmhubj/selenium,meksh/selenium,stupidnetizen/selenium,skurochkin/selenium,sri85/selenium,MeetMe/selenium,chrsmithdemos/selenium,JosephCastro/selenium,TheBlackTuxCorp/selenium,krmahadevan/selenium,tkurnosova/selenium,Tom-Trumper/selenium,BlackSmith/selenium,amikey/selenium,asolntsev/selenium,sebady/selenium,gemini-testing/selenium,titusfortner/selenium,isaksky/selenium,gorlemik/selenium,RamaraoDonta/ramarao-clone,doungni/selenium,carsonmcdonal
d/selenium,anshumanchatterji/selenium,mach6/selenium,BlackSmith/selenium,lummyare/lummyare-test,jerome-jacob/selenium,RamaraoDonta/ramarao-clone,5hawnknight/selenium,stupidnetizen/selenium,jsakamoto/selenium,dcjohnson1989/selenium,SevInf/IEDriver,SeleniumHQ/selenium,p0deje/selenium,minhthuanit/selenium,arunsingh/selenium,asashour/selenium,juangj/selenium,gabrielsimas/selenium,dandv/selenium,oddui/selenium,Ardesco/selenium,livioc/selenium,gregerrag/selenium,tkurnosova/selenium,BlackSmith/selenium,dibagga/selenium,isaksky/selenium,5hawnknight/selenium,stupidnetizen/selenium,actmd/selenium,kalyanjvn1/selenium,dibagga/selenium,rrussell39/selenium,thanhpete/selenium,bmannix/selenium,markodolancic/selenium,asashour/selenium,Herst/selenium,petruc/selenium,sag-enorman/selenium,pulkitsinghal/selenium,lrowe/selenium,rplevka/selenium,denis-vilyuzhanin/selenium-fastview,dimacus/selenium,uchida/selenium,sevaseva/selenium,carlosroh/selenium,JosephCastro/selenium,joshuaduffy/selenium,vveliev/selenium,lilredindy/selenium,davehunt/selenium,sag-enorman/selenium,anshumanchatterji/selenium,jabbrwcky/selenium,skurochkin/selenium,dimacus/selenium,Appdynamics/selenium,Ardesco/selenium,chrisblock/selenium,gemini-testing/selenium,sankha93/selenium,bartolkaruza/selenium,gurayinan/selenium,Dude-X/selenium,MeetMe/selenium,jabbrwcky/selenium,freynaud/selenium,jknguyen/josephknguyen-selenium,o-schneider/selenium,jsakamoto/selenium,knorrium/selenium,gregerrag/selenium,dkentw/selenium,SouWilliams/selenium,jerome-jacob/selenium,JosephCastro/selenium,thanhpete/selenium,Herst/selenium,carlosroh/selenium,rplevka/selenium,rrussell39/selenium,joshbruning/selenium,joshmgrant/selenium,rovner/selenium,TikhomirovSergey/selenium,telefonicaid/selenium,TheBlackTuxCorp/selenium,asashour/selenium,telefonicaid/selenium,compstak/selenium,gorlemik/selenium,mach6/selenium,jabbrwcky/selenium,HtmlUnit/selenium,MeetMe/selenium,dandv/selenium,sag-enorman/selenium,yukaReal/selenium,soundcloud/selenium,misttechnologies/se
lenium,tkurnosova/selenium,AutomatedTester/selenium,mestihudson/selenium,markodolancic/selenium,Herst/selenium,thanhpete/selenium,Jarob22/selenium,quoideneuf/selenium,i17c/selenium,aluedeke/chromedriver,doungni/selenium,vveliev/selenium,joshmgrant/selenium,alb-i986/selenium,eric-stanley/selenium,titusfortner/selenium,onedox/selenium,quoideneuf/selenium,joshuaduffy/selenium,doungni/selenium,carsonmcdonald/selenium,uchida/selenium,asashour/selenium,valfirst/selenium,valfirst/selenium,dcjohnson1989/selenium,livioc/selenium,alb-i986/selenium,clavery/selenium,i17c/selenium,valfirst/selenium,gemini-testing/selenium,gorlemik/selenium,bmannix/selenium,knorrium/selenium,jabbrwcky/selenium,doungni/selenium,arunsingh/selenium,vinay-qa/vinayit-android-server-apk,krmahadevan/selenium,Tom-Trumper/selenium,s2oBCN/selenium,jknguyen/josephknguyen-selenium,twalpole/selenium,sebady/selenium,i17c/selenium,TheBlackTuxCorp/selenium,minhthuanit/selenium,lummyare/lummyare-test,blueyed/selenium,compstak/selenium,rrussell39/selenium,juangj/selenium,s2oBCN/selenium,alexec/selenium,o-schneider/selenium,amikey/selenium,gregerrag/selenium,gurayinan/selenium,mach6/selenium,Tom-Trumper/selenium,dimacus/selenium,5hawnknight/selenium,rrussell39/selenium,arunsingh/selenium,dandv/selenium,valfirst/selenium,lrowe/selenium,amar-sharma/selenium,temyers/selenium,minhthuanit/selenium,sankha93/selenium,juangj/selenium,meksh/selenium,chrsmithdemos/selenium,krmahadevan/selenium,skurochkin/selenium,amikey/selenium,oddui/selenium,joshuaduffy/selenium,freynaud/selenium,customcommander/selenium,houchj/selenium,lrowe/selenium,dkentw/selenium,isaksky/selenium,titusfortner/selenium,zenefits/selenium,lilredindy/selenium,wambat/selenium,krosenvold/selenium,zenefits/selenium,GorK-ChO/selenium,bartolkaruza/selenium,yukaReal/selenium,dkentw/selenium,lummyare/lummyare-lummy,eric-stanley/selenium,alb-i986/selenium,dcjohnson1989/selenium,dcjohnson1989/selenium,Tom-Trumper/selenium,sri85/selenium,rovner/selenium,JosephCastro
/selenium,jsarenik/jajomojo-selenium,temyers/selenium,TheBlackTuxCorp/selenium,dibagga/selenium,TikhomirovSergey/selenium,krosenvold/selenium-git-release-candidate,eric-stanley/selenium,Ardesco/selenium,vinay-qa/vinayit-android-server-apk,xmhubj/selenium,juangj/selenium,dbo/selenium,SouWilliams/selenium,oddui/selenium,doungni/selenium,chrisblock/selenium,denis-vilyuzhanin/selenium-fastview,tbeadle/selenium,carsonmcdonald/selenium,slongwang/selenium,doungni/selenium,carlosroh/selenium,dbo/selenium,misttechnologies/selenium,xsyntrex/selenium,yukaReal/selenium,mojwang/selenium,denis-vilyuzhanin/selenium-fastview,xmhubj/selenium,tbeadle/selenium,anshumanchatterji/selenium,livioc/selenium,SouWilliams/selenium,minhthuanit/selenium,eric-stanley/selenium,carlosroh/selenium,Dude-X/selenium,freynaud/selenium,onedox/selenium,p0deje/selenium,tkurnosova/selenium,BlackSmith/selenium,alexec/selenium,lmtierney/selenium,s2oBCN/selenium,HtmlUnit/selenium,actmd/selenium,gregerrag/selenium,jknguyen/josephknguyen-selenium,bmannix/selenium,wambat/selenium,tbeadle/selenium,TikhomirovSergey/selenium,clavery/selenium,GorK-ChO/selenium,joshmgrant/selenium,BlackSmith/selenium,gabrielsimas/selenium,TheBlackTuxCorp/selenium,Sravyaksr/selenium,tkurnosova/selenium,mestihudson/selenium,dandv/selenium,zenefits/selenium,o-schneider/selenium,doungni/selenium,twalpole/selenium,krosenvold/selenium,blueyed/selenium,actmd/selenium,vveliev/selenium,sag-enorman/selenium,yukaReal/selenium,gotcha/selenium,Tom-Trumper/selenium,amikey/selenium,vinay-qa/vinayit-android-server-apk,lummyare/lummyare-lummy,dcjohnson1989/selenium,tbeadle/selenium,SevInf/IEDriver,alexec/selenium,bayandin/selenium,lummyare/lummyare-test,thanhpete/selenium,joshmgrant/selenium,i17c/selenium,RamaraoDonta/ramarao-clone,Tom-Trumper/selenium,RamaraoDonta/ramarao-clone,rplevka/selenium,GorK-ChO/selenium,chrisblock/selenium,mach6/selenium,gemini-testing/selenium,doungni/selenium,alexec/selenium,BlackSmith/selenium,slongwang/selenium,xmhubj/s
elenium,SevInf/IEDriver,chrsmithdemos/selenium,anshumanchatterji/selenium,i17c/selenium,actmd/selenium,knorrium/selenium,misttechnologies/selenium,krmahadevan/selenium,RamaraoDonta/ramarao-clone,sebady/selenium,quoideneuf/selenium,onedox/selenium,kalyanjvn1/selenium,Ardesco/selenium,lukeis/selenium,clavery/selenium,MeetMe/selenium,asolntsev/selenium,thanhpete/selenium,misttechnologies/selenium,rrussell39/selenium,sag-enorman/selenium,xsyntrex/selenium,twalpole/selenium,jknguyen/josephknguyen-selenium,o-schneider/selenium,Jarob22/selenium,telefonicaid/selenium,lmtierney/selenium,telefonicaid/selenium,xsyntrex/selenium,p0deje/selenium,SeleniumHQ/selenium,titusfortner/selenium,oddui/selenium,freynaud/selenium,dbo/selenium,gotcha/selenium,krmahadevan/selenium,joshmgrant/selenium,xsyntrex/selenium,jknguyen/josephknguyen-selenium,houchj/selenium,lukeis/selenium,lilredindy/selenium,sri85/selenium,Appdynamics/selenium,xsyntrex/selenium,5hawnknight/selenium,DrMarcII/selenium,pulkitsinghal/selenium,petruc/selenium,livioc/selenium,jerome-jacob/selenium,krosenvold/selenium,petruc/selenium,joshbruning/selenium,twalpole/selenium,RamaraoDonta/ramarao-clone,lilredindy/selenium,krmahadevan/selenium,amar-sharma/selenium,blueyed/selenium,stupidnetizen/selenium,jerome-jacob/selenium,blueyed/selenium,meksh/selenium,blackboarddd/selenium,gurayinan/selenium,alexec/selenium,titusfortner/selenium,kalyanjvn1/selenium,temyers/selenium,Sravyaksr/selenium,blueyed/selenium,customcommander/selenium,SeleniumHQ/selenium,temyers/selenium,orange-tv-blagnac/selenium,rovner/selenium,thanhpete/selenium,yukaReal/selenium,sankha93/selenium,MCGallaspy/selenium,blackboarddd/selenium,gorlemik/selenium,temyers/selenium,MCGallaspy/selenium,lummyare/lummyare-test,xmhubj/selenium,jsakamoto/selenium,amar-sharma/selenium,DrMarcII/selenium,yukaReal/selenium,5hawnknight/selenium,slongwang/selenium,tarlabs/selenium,alb-i986/selenium,aluedeke/chromedriver,SouWilliams/selenium,blackboarddd/selenium,juangj/selenium,mark
odolancic/selenium,lrowe/selenium,JosephCastro/selenium,uchida/selenium,sevaseva/selenium,mojwang/selenium,eric-stanley/selenium,joshbruning/selenium,MCGallaspy/selenium,davehunt/selenium,valfirst/selenium,sevaseva/selenium,misttechnologies/selenium,zenefits/selenium,mojwang/selenium,minhthuanit/selenium,Sravyaksr/selenium,oddui/selenium,minhthuanit/selenium,anshumanchatterji/selenium,rplevka/selenium,aluedeke/chromedriver,TikhomirovSergey/selenium,lrowe/selenium,skurochkin/selenium,rrussell39/selenium,joshuaduffy/selenium,aluedeke/chromedriver,jsakamoto/selenium,petruc/selenium,sebady/selenium,customcommander/selenium,p0deje/selenium,AutomatedTester/selenium,alexec/selenium,rplevka/selenium,Herst/selenium,BlackSmith/selenium,lummyare/lummyare-lummy,carlosroh/selenium,thanhpete/selenium,tarlabs/selenium,denis-vilyuzhanin/selenium-fastview,gurayinan/selenium,jerome-jacob/selenium,sag-enorman/selenium,quoideneuf/selenium,knorrium/selenium,asolntsev/selenium,dbo/selenium,wambat/selenium,sag-enorman/selenium,HtmlUnit/selenium,AutomatedTester/selenium,mestihudson/selenium,livioc/selenium,twalpole/selenium,onedox/selenium,anshumanchatterji/selenium,bartolkaruza/selenium,SouWilliams/selenium,zenefits/selenium,SevInf/IEDriver,p0deje/selenium,sebady/selenium,lukeis/selenium,pulkitsinghal/selenium,titusfortner/selenium,krosenvold/selenium-git-release-candidate,soundcloud/selenium,sevaseva/selenium,uchida/selenium,blueyed/selenium,xmhubj/selenium,mojwang/selenium,orange-tv-blagnac/selenium,mestihudson/selenium,dandv/selenium,chrsmithdemos/selenium,gregerrag/selenium,Ardesco/selenium,quoideneuf/selenium,lummyare/lummyare-lummy,markodolancic/selenium,chrisblock/selenium,sri85/selenium,houchj/selenium,petruc/selenium,RamaraoDonta/ramarao-clone,joshbruning/selenium,blackboarddd/selenium,quoideneuf/selenium,jknguyen/josephknguyen-selenium,telefonicaid/selenium,dibagga/selenium,arunsingh/selenium,soundcloud/selenium,blackboarddd/selenium,lmtierney/selenium,lmtierney/selenium,stupidn
etizen/selenium,bartolkaruza/selenium,slongwang/selenium,Jarob22/selenium,livioc/selenium,misttechnologies/selenium,carsonmcdonald/selenium,uchida/selenium,bartolkaruza/selenium,kalyanjvn1/selenium,houchj/selenium,vveliev/selenium,amar-sharma/selenium,sag-enorman/selenium,actmd/selenium,krosenvold/selenium,sevaseva/selenium,Herst/selenium,wambat/selenium,onedox/selenium,AutomatedTester/selenium,clavery/selenium,tbeadle/selenium,blueyed/selenium,pulkitsinghal/selenium,tbeadle/selenium,alb-i986/selenium,aluedeke/chromedriver,joshuaduffy/selenium,joshmgrant/selenium,AutomatedTester/selenium,clavery/selenium,lrowe/selenium,telefonicaid/selenium,jsarenik/jajomojo-selenium,amikey/selenium,lummyare/lummyare-lummy,joshuaduffy/selenium,jabbrwcky/selenium,RamaraoDonta/ramarao-clone,dkentw/selenium,freynaud/selenium,stupidnetizen/selenium,TheBlackTuxCorp/selenium,gemini-testing/selenium,SeleniumHQ/selenium,livioc/selenium,krmahadevan/selenium,Herst/selenium,vinay-qa/vinayit-android-server-apk,Sravyaksr/selenium,bmannix/selenium,jabbrwcky/selenium,juangj/selenium,yukaReal/selenium,thanhpete/selenium,Dude-X/selenium,kalyanjvn1/selenium,xmhubj/selenium,dbo/selenium,juangj/selenium,tarlabs/selenium,pulkitsinghal/selenium,xsyntrex/selenium,livioc/selenium,dcjohnson1989/selenium,vinay-qa/vinayit-android-server-apk,davehunt/selenium,bmannix/selenium,mojwang/selenium,jsarenik/jajomojo-selenium,pulkitsinghal/selenium,tarlabs/selenium,lilredindy/selenium,asashour/selenium,gorlemik/selenium,customcommander/selenium,jerome-jacob/selenium,JosephCastro/selenium,sebady/selenium,lummyare/lummyare-test,meksh/selenium,uchida/selenium,gemini-testing/selenium,joshbruning/selenium,dimacus/selenium,zenefits/selenium,juangj/selenium,krosenvold/selenium-git-release-candidate,quoideneuf/selenium,sevaseva/selenium,Appdynamics/selenium,dkentw/selenium,gurayinan/selenium,Tom-Trumper/selenium,knorrium/selenium,gorlemik/selenium,lrowe/selenium,tkurnosova/selenium,gotcha/selenium,sebady/selenium,mojwang/sel
enium,rrussell39/selenium,tkurnosova/selenium,bayandin/selenium,lukeis/selenium,s2oBCN/selenium,davehunt/selenium,gotcha/selenium,Jarob22/selenium,clavery/selenium,carlosroh/selenium,mestihudson/selenium,skurochkin/selenium,Sravyaksr/selenium,chrsmithdemos/selenium,jknguyen/josephknguyen-selenium,wambat/selenium,rovner/selenium,bayandin/selenium,compstak/selenium,xmhubj/selenium,lmtierney/selenium,amikey/selenium,uchida/selenium,lummyare/lummyare-lummy,krosenvold/selenium-git-release-candidate,bmannix/selenium,dandv/selenium,o-schneider/selenium,jerome-jacob/selenium,blackboarddd/selenium,skurochkin/selenium,SeleniumHQ/selenium,SouWilliams/selenium,eric-stanley/selenium,SevInf/IEDriver,bmannix/selenium,s2oBCN/selenium,wambat/selenium,isaksky/selenium,sankha93/selenium,lummyare/lummyare-test,DrMarcII/selenium,GorK-ChO/selenium,tarlabs/selenium,sankha93/selenium,customcommander/selenium,manuelpirez/selenium,compstak/selenium,orange-tv-blagnac/selenium,Dude-X/selenium,amikey/selenium,joshmgrant/selenium,asolntsev/selenium,compstak/selenium,slongwang/selenium,houchj/selenium,gorlemik/selenium,dimacus/selenium,MeetMe/selenium,isaksky/selenium,amar-sharma/selenium,telefonicaid/selenium,knorrium/selenium,minhthuanit/selenium,lukeis/selenium,wambat/selenium,compstak/selenium,minhthuanit/selenium,kalyanjvn1/selenium
|
java
|
## Code Before:
package org.openqa.selenium.server;
public class BrowserConfigurationOptions {
private String profile = "";
public BrowserConfigurationOptions(String browserConfiguration) {
//"name:value;name:value"
String[] optionsPairList = browserConfiguration.split(";");
for (int i = 0; i < optionsPairList.length; i++) {
String[] option = optionsPairList[i].split(":", 2);
String optionsName = option[0].trim();
String optionValue = option[1].trim();
if ("profile".equalsIgnoreCase(optionsName)) {
this.profile = optionValue;
}
}
}
public BrowserConfigurationOptions() {}
/**
* Sets the profile name for this configuration object.
* @param profile_name The name of the profile to use
*/
public void setProfile(String profile_name) {
this.profile = profile_name;
}
public String serialize() {
//"profile:XXXXXXXXXX"
return String.format("profile:%s", profile);
}
public String getProfile() {
return profile;
}
}
## Instruction:
Fix index out of bounds exception by first checking to make sure the configuration option parsed correctly.
r5374
## Code After:
package org.openqa.selenium.server;
public class BrowserConfigurationOptions {
private String profile = "";
public BrowserConfigurationOptions(String browserConfiguration) {
//"name:value;name:value"
String[] optionsPairList = browserConfiguration.split(";");
for (int i = 0; i < optionsPairList.length; i++) {
String[] option = optionsPairList[i].split(":", 2);
if (2 == option.length) {
String optionsName = option[0].trim();
String optionValue = option[1].trim();
if ("profile".equalsIgnoreCase(optionsName)) {
this.profile = optionValue;
}
}
}
}
public BrowserConfigurationOptions() {}
/**
* Sets the profile name for this configuration object.
* @param profile_name The name of the profile to use
*/
public void setProfile(String profile_name) {
this.profile = profile_name;
}
public String serialize() {
//"profile:XXXXXXXXXX"
return String.format("profile:%s", profile);
}
public String getProfile() {
return profile;
}
}
|
# ... existing code ...
String[] optionsPairList = browserConfiguration.split(";");
for (int i = 0; i < optionsPairList.length; i++) {
String[] option = optionsPairList[i].split(":", 2);
if (2 == option.length) {
String optionsName = option[0].trim();
String optionValue = option[1].trim();
if ("profile".equalsIgnoreCase(optionsName)) {
this.profile = optionValue;
}
}
}
}
# ... rest of the code ...
|
50f2cd076aae183376ab14d31594c104ac210738
|
shivyc.py
|
shivyc.py
|
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The C file to compile
parser.add_argument("file_name")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
Rename file_name argument on command line
|
Rename file_name argument on command line
|
Python
|
mit
|
ShivamSarodia/ShivyC,ShivamSarodia/ShivyC,ShivamSarodia/ShivyC
|
python
|
## Code Before:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The C file to compile
parser.add_argument("file_name")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
## Instruction:
Rename file_name argument on command line
## Code After:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
# ... existing code ...
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
# ... rest of the code ...
|
ce2f370854b62371505f54c7cc12077d36e1e21c
|
src/main/java/uk/gov/DestinationPostgresDB.java
|
src/main/java/uk/gov/DestinationPostgresDB.java
|
package uk.gov;
import org.postgresql.util.PGobject;
import java.nio.charset.Charset;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
class DestinationPostgresDB extends PostgresDB {
private final String indexedEntriesTableName;
private final String waterMarkTableName;
public DestinationPostgresDB(String connectionString) throws SQLException {
super(connectionString);
this.indexedEntriesTableName = "ordered_entry_index";
this.waterMarkTableName = "streamed_entries";
}
public void write(ResultSet resultSet) throws SQLException {
while (resultSet.next()) {
try (PreparedStatement statement = connection.prepareStatement("INSERT INTO " + indexedEntriesTableName + "(ENTRY) VALUES(?)")) {
statement.setObject(1, jsonbObject(resultSet.getBytes("ENTRY")));
statement.executeUpdate();
}
}
}
public int currentWaterMark() throws SQLException {
try (Statement statement = connection.createStatement()) {
try(ResultSet resultSet = statement.executeQuery("SELECT ID FROM " + waterMarkTableName)){
return resultSet.next() ? resultSet.getInt("ID") : 0;
}
}
}
private PGobject jsonbObject(byte[] value) {
try {
PGobject pGobject = new PGobject();
pGobject.setType("jsonb");
pGobject.setValue(new String(value, Charset.forName("UTF-8")));
return pGobject;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
|
package uk.gov;
import org.postgresql.util.PGobject;
import java.nio.charset.Charset;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
class DestinationPostgresDB extends PostgresDB {
private final String indexedEntriesTableName;
private final String waterMarkTableName;
public DestinationPostgresDB(String connectionString) throws SQLException {
super(connectionString);
this.indexedEntriesTableName = "ordered_entry_index";
this.waterMarkTableName = "streamed_entries";
}
public void write(ResultSet resultSet) throws SQLException {
while (resultSet.next()) {
connection.setAutoCommit(false);
try (PreparedStatement statement = connection.prepareStatement("INSERT INTO " + indexedEntriesTableName + "(ENTRY) VALUES(?)")) {
statement.setObject(1, jsonbObject(resultSet.getBytes("ENTRY")));
statement.executeUpdate();
}
try (PreparedStatement statement = connection.prepareStatement("UPDATE " + waterMarkTableName + " SET ID = ID + 1")) {
statement.executeUpdate();
}
connection.setAutoCommit(true);
}
}
public int currentWaterMark() throws SQLException {
try (Statement statement = connection.createStatement()) {
try(ResultSet resultSet = statement.executeQuery("SELECT ID FROM " + waterMarkTableName)){
return resultSet.next() ? resultSet.getInt("ID") : 0;
}
}
}
private PGobject jsonbObject(byte[] value) {
try {
PGobject pGobject = new PGobject();
pGobject.setType("jsonb");
pGobject.setValue(new String(value, Charset.forName("UTF-8")));
return pGobject;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
|
Increase watermark by one everytime when an entry is written
|
Increase watermark by one everytime when an entry is written
|
Java
|
mit
|
openregister/openregister-java,openregister/openregister-java,openregister/openregister-java,openregister/openregister-java,openregister/openregister-java
|
java
|
## Code Before:
package uk.gov;
import org.postgresql.util.PGobject;
import java.nio.charset.Charset;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
class DestinationPostgresDB extends PostgresDB {
private final String indexedEntriesTableName;
private final String waterMarkTableName;
public DestinationPostgresDB(String connectionString) throws SQLException {
super(connectionString);
this.indexedEntriesTableName = "ordered_entry_index";
this.waterMarkTableName = "streamed_entries";
}
public void write(ResultSet resultSet) throws SQLException {
while (resultSet.next()) {
try (PreparedStatement statement = connection.prepareStatement("INSERT INTO " + indexedEntriesTableName + "(ENTRY) VALUES(?)")) {
statement.setObject(1, jsonbObject(resultSet.getBytes("ENTRY")));
statement.executeUpdate();
}
}
}
public int currentWaterMark() throws SQLException {
try (Statement statement = connection.createStatement()) {
try(ResultSet resultSet = statement.executeQuery("SELECT ID FROM " + waterMarkTableName)){
return resultSet.next() ? resultSet.getInt("ID") : 0;
}
}
}
private PGobject jsonbObject(byte[] value) {
try {
PGobject pGobject = new PGobject();
pGobject.setType("jsonb");
pGobject.setValue(new String(value, Charset.forName("UTF-8")));
return pGobject;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
## Instruction:
Increase watermark by one everytime when an entry is written
## Code After:
package uk.gov;
import org.postgresql.util.PGobject;
import java.nio.charset.Charset;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
class DestinationPostgresDB extends PostgresDB {
private final String indexedEntriesTableName;
private final String waterMarkTableName;
public DestinationPostgresDB(String connectionString) throws SQLException {
super(connectionString);
this.indexedEntriesTableName = "ordered_entry_index";
this.waterMarkTableName = "streamed_entries";
}
public void write(ResultSet resultSet) throws SQLException {
while (resultSet.next()) {
connection.setAutoCommit(false);
try (PreparedStatement statement = connection.prepareStatement("INSERT INTO " + indexedEntriesTableName + "(ENTRY) VALUES(?)")) {
statement.setObject(1, jsonbObject(resultSet.getBytes("ENTRY")));
statement.executeUpdate();
}
try (PreparedStatement statement = connection.prepareStatement("UPDATE " + waterMarkTableName + " SET ID = ID + 1")) {
statement.executeUpdate();
}
connection.setAutoCommit(true);
}
}
public int currentWaterMark() throws SQLException {
try (Statement statement = connection.createStatement()) {
try(ResultSet resultSet = statement.executeQuery("SELECT ID FROM " + waterMarkTableName)){
return resultSet.next() ? resultSet.getInt("ID") : 0;
}
}
}
private PGobject jsonbObject(byte[] value) {
try {
PGobject pGobject = new PGobject();
pGobject.setType("jsonb");
pGobject.setValue(new String(value, Charset.forName("UTF-8")));
return pGobject;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
|
// ... existing code ...
public void write(ResultSet resultSet) throws SQLException {
while (resultSet.next()) {
connection.setAutoCommit(false);
try (PreparedStatement statement = connection.prepareStatement("INSERT INTO " + indexedEntriesTableName + "(ENTRY) VALUES(?)")) {
statement.setObject(1, jsonbObject(resultSet.getBytes("ENTRY")));
statement.executeUpdate();
}
try (PreparedStatement statement = connection.prepareStatement("UPDATE " + waterMarkTableName + " SET ID = ID + 1")) {
statement.executeUpdate();
}
connection.setAutoCommit(true);
}
}
// ... rest of the code ...
|
c040f0b93426374ca452ef779f4329f4b91bc291
|
core/src/main/java/forklift/consumer/ProcessStep.java
|
core/src/main/java/forklift/consumer/ProcessStep.java
|
package forklift.consumer;
public enum ProcessStep {
Pending,
Validating,
Invalid,
Processing,
Error,
Complete
}
|
package forklift.consumer;
public enum ProcessStep {
Pending,
Validating,
Invalid,
Processing,
Retrying,
Error,
Complete
}
|
Add the retrying process step to indicate a message is being attempted for the nth time.
|
Add the retrying process step to indicate a message is being attempted for the nth time.
|
Java
|
mit
|
dcshock/forklift,dcshock/forklift
|
java
|
## Code Before:
package forklift.consumer;
public enum ProcessStep {
Pending,
Validating,
Invalid,
Processing,
Error,
Complete
}
## Instruction:
Add the retrying process step to indicate a message is being attempted for the nth time.
## Code After:
package forklift.consumer;
public enum ProcessStep {
Pending,
Validating,
Invalid,
Processing,
Retrying,
Error,
Complete
}
|
# ... existing code ...
Validating,
Invalid,
Processing,
Retrying,
Error,
Complete
}
# ... rest of the code ...
|
939a9e0da24c0f2626f985524e9654bc5c7a23e9
|
src/protocolsupport/protocol/utils/spoofedata/PaperSpoofedDataParser.java
|
src/protocolsupport/protocol/utils/spoofedata/PaperSpoofedDataParser.java
|
package protocolsupport.protocol.utils.spoofedata;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.function.Function;
import org.bukkit.Bukkit;
import com.destroystokyo.paper.event.player.PlayerHandshakeEvent;
import com.destroystokyo.paper.profile.ProfileProperty;
import com.google.gson.reflect.TypeToken;
import protocolsupport.utils.Utils;
public class PaperSpoofedDataParser implements Function<String, SpoofedData> {
protected static final Type properties_type = new TypeToken<Collection<ProfileProperty>>() {}.getType();
@Override
public SpoofedData apply(String hostname) {
if (PlayerHandshakeEvent.getHandlerList().getRegisteredListeners().length != 0) {
PlayerHandshakeEvent handshakeEvent = new PlayerHandshakeEvent(hostname, false);
Bukkit.getPluginManager().callEvent(handshakeEvent);
if (!handshakeEvent.isCancelled()) {
if (handshakeEvent.isFailed()) {
return new SpoofedData(handshakeEvent.getFailMessage());
}
return new SpoofedData(
handshakeEvent.getServerHostname(),
handshakeEvent.getSocketAddressHostname(),
handshakeEvent.getUniqueId(),
Utils.GSON.fromJson(handshakeEvent.getPropertiesJson(), properties_type)
);
}
}
return null;
}
}
|
package protocolsupport.protocol.utils.spoofedata;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.function.Function;
import org.bukkit.Bukkit;
import com.destroystokyo.paper.event.player.PlayerHandshakeEvent;
import com.google.gson.reflect.TypeToken;
import protocolsupport.api.utils.ProfileProperty;
import protocolsupport.utils.Utils;
public class PaperSpoofedDataParser implements Function<String, SpoofedData> {
protected static final Type properties_type = new TypeToken<Collection<ProfileProperty>>() {}.getType();
@Override
public SpoofedData apply(String hostname) {
if (PlayerHandshakeEvent.getHandlerList().getRegisteredListeners().length != 0) {
PlayerHandshakeEvent handshakeEvent = new PlayerHandshakeEvent(hostname, false);
Bukkit.getPluginManager().callEvent(handshakeEvent);
if (!handshakeEvent.isCancelled()) {
if (handshakeEvent.isFailed()) {
return new SpoofedData(handshakeEvent.getFailMessage());
}
return new SpoofedData(
handshakeEvent.getServerHostname(),
handshakeEvent.getSocketAddressHostname(),
handshakeEvent.getUniqueId(),
Utils.GSON.fromJson(handshakeEvent.getPropertiesJson(), properties_type)
);
}
}
return null;
}
}
|
Fix wrong properties type used
|
Fix wrong properties type used
|
Java
|
agpl-3.0
|
ProtocolSupport/ProtocolSupport
|
java
|
## Code Before:
package protocolsupport.protocol.utils.spoofedata;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.function.Function;
import org.bukkit.Bukkit;
import com.destroystokyo.paper.event.player.PlayerHandshakeEvent;
import com.destroystokyo.paper.profile.ProfileProperty;
import com.google.gson.reflect.TypeToken;
import protocolsupport.utils.Utils;
public class PaperSpoofedDataParser implements Function<String, SpoofedData> {
protected static final Type properties_type = new TypeToken<Collection<ProfileProperty>>() {}.getType();
@Override
public SpoofedData apply(String hostname) {
if (PlayerHandshakeEvent.getHandlerList().getRegisteredListeners().length != 0) {
PlayerHandshakeEvent handshakeEvent = new PlayerHandshakeEvent(hostname, false);
Bukkit.getPluginManager().callEvent(handshakeEvent);
if (!handshakeEvent.isCancelled()) {
if (handshakeEvent.isFailed()) {
return new SpoofedData(handshakeEvent.getFailMessage());
}
return new SpoofedData(
handshakeEvent.getServerHostname(),
handshakeEvent.getSocketAddressHostname(),
handshakeEvent.getUniqueId(),
Utils.GSON.fromJson(handshakeEvent.getPropertiesJson(), properties_type)
);
}
}
return null;
}
}
## Instruction:
Fix wrong properties type used
## Code After:
package protocolsupport.protocol.utils.spoofedata;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.function.Function;
import org.bukkit.Bukkit;
import com.destroystokyo.paper.event.player.PlayerHandshakeEvent;
import com.google.gson.reflect.TypeToken;
import protocolsupport.api.utils.ProfileProperty;
import protocolsupport.utils.Utils;
public class PaperSpoofedDataParser implements Function<String, SpoofedData> {
protected static final Type properties_type = new TypeToken<Collection<ProfileProperty>>() {}.getType();
@Override
public SpoofedData apply(String hostname) {
if (PlayerHandshakeEvent.getHandlerList().getRegisteredListeners().length != 0) {
PlayerHandshakeEvent handshakeEvent = new PlayerHandshakeEvent(hostname, false);
Bukkit.getPluginManager().callEvent(handshakeEvent);
if (!handshakeEvent.isCancelled()) {
if (handshakeEvent.isFailed()) {
return new SpoofedData(handshakeEvent.getFailMessage());
}
return new SpoofedData(
handshakeEvent.getServerHostname(),
handshakeEvent.getSocketAddressHostname(),
handshakeEvent.getUniqueId(),
Utils.GSON.fromJson(handshakeEvent.getPropertiesJson(), properties_type)
);
}
}
return null;
}
}
|
...
import org.bukkit.Bukkit;
import com.destroystokyo.paper.event.player.PlayerHandshakeEvent;
import com.google.gson.reflect.TypeToken;
import protocolsupport.api.utils.ProfileProperty;
import protocolsupport.utils.Utils;
public class PaperSpoofedDataParser implements Function<String, SpoofedData> {
...
|
cdf3686150309800cb28f584b64b9175aa4b5662
|
tests/unit_tests/gather_tests/MameSink_test.py
|
tests/unit_tests/gather_tests/MameSink_test.py
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.mark.parametrize("message, expected",[
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(message, expected, worker):
sink = MameSink.MameSink("inproc://help")
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
sink._cleanup()
# @pytest.mark.parametrize("messages, expected", [
# ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
# {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
# ])
# @pytest.mark.timeout(timeout=10, method='thread')
# def test_run(workers, messages, expected):
# sink = MameSink.MameSink("inproc://frommockworkers")
# sink.setup_workers2(workers)
# pass
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
|
Test Class now returns to base state between different groups of tests
|
Test Class now returns to base state between different groups of tests
|
Python
|
mit
|
goosechooser/cps2-zmq
|
python
|
## Code Before:
import pytest
from cps2_zmq.gather import MameSink
@pytest.mark.parametrize("message, expected",[
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(message, expected, worker):
sink = MameSink.MameSink("inproc://help")
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
sink._cleanup()
# @pytest.mark.parametrize("messages, expected", [
# ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
# {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
# ])
# @pytest.mark.timeout(timeout=10, method='thread')
# def test_run(workers, messages, expected):
# sink = MameSink.MameSink("inproc://frommockworkers")
# sink.setup_workers2(workers)
# pass
## Instruction:
Test Class now returns to base state between different groups of tests
## Code After:
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
|
# ... existing code ...
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
# ... rest of the code ...
|
754fbe3028dff6448a4d50ead35911578f91c7d8
|
test/test_encode_atom.c
|
test/test_encode_atom.c
|
unsigned char output[6];
void test_output()
{
if (output[0] != BERT_MAGIC)
{
test_fail("bert_encoder_push did not add the magic byte");
}
if (output[1] != BERT_ATOM)
{
test_fail("bert_encoder_push did not add the SMALL_INT magic byte");
}
size_t expected_length = 2;
if (output[3] != expected_length)
{
test_fail("bert_encoder_push encoded %u as the atom length, expected %u",output[3],expected_length);
}
const char *expected = "id";
test_strings((const char *)(output+4),expected,expected_length);
}
int main()
{
bert_encoder_t *encoder = test_encoder(output,6);
bert_data_t *data;
if (!(data = bert_data_create_atom("id")))
{
test_fail("malloc failed");
}
test_encoder_push(encoder,data);
bert_data_destroy(data);
bert_encoder_destroy(encoder);
test_output();
return 0;
}
|
unsigned char output[OUTPUT_SIZE];
void test_output()
{
if (output[0] != BERT_MAGIC)
{
test_fail("bert_encoder_push did not add the magic byte");
}
if (output[1] != BERT_ATOM)
{
test_fail("bert_encoder_push did not add the SMALL_INT magic byte");
}
if (output[3] != EXPECTED_LENGTH)
{
test_fail("bert_encoder_push encoded %u as the atom length, expected %u",output[3],expected_length);
}
test_strings((const char *)(output+4),EXPECTED,expected_length);
}
int main()
{
bert_encoder_t *encoder = test_encoder(output,OUTPUT_SIZE);
bert_data_t *data;
if (!(data = bert_data_create_atom(EXPECTED)))
{
test_fail("malloc failed");
}
test_encoder_push(encoder,data);
bert_data_destroy(data);
bert_encoder_destroy(encoder);
test_output();
return 0;
}
|
Use EXPECTED/EXPECTED_LENGTH/OUTPUT_SIZE macros in the atom encoding test.
|
Use EXPECTED/EXPECTED_LENGTH/OUTPUT_SIZE macros in the atom encoding test.
|
C
|
mit
|
postmodern/libBERT
|
c
|
## Code Before:
unsigned char output[6];
void test_output()
{
if (output[0] != BERT_MAGIC)
{
test_fail("bert_encoder_push did not add the magic byte");
}
if (output[1] != BERT_ATOM)
{
test_fail("bert_encoder_push did not add the SMALL_INT magic byte");
}
size_t expected_length = 2;
if (output[3] != expected_length)
{
test_fail("bert_encoder_push encoded %u as the atom length, expected %u",output[3],expected_length);
}
const char *expected = "id";
test_strings((const char *)(output+4),expected,expected_length);
}
int main()
{
bert_encoder_t *encoder = test_encoder(output,6);
bert_data_t *data;
if (!(data = bert_data_create_atom("id")))
{
test_fail("malloc failed");
}
test_encoder_push(encoder,data);
bert_data_destroy(data);
bert_encoder_destroy(encoder);
test_output();
return 0;
}
## Instruction:
Use EXPECTED/EXPECTED_LENGTH/OUTPUT_SIZE macros in the atom encoding test.
## Code After:
unsigned char output[OUTPUT_SIZE];
void test_output()
{
if (output[0] != BERT_MAGIC)
{
test_fail("bert_encoder_push did not add the magic byte");
}
if (output[1] != BERT_ATOM)
{
test_fail("bert_encoder_push did not add the SMALL_INT magic byte");
}
if (output[3] != EXPECTED_LENGTH)
{
test_fail("bert_encoder_push encoded %u as the atom length, expected %u",output[3],expected_length);
}
test_strings((const char *)(output+4),EXPECTED,expected_length);
}
int main()
{
bert_encoder_t *encoder = test_encoder(output,OUTPUT_SIZE);
bert_data_t *data;
if (!(data = bert_data_create_atom(EXPECTED)))
{
test_fail("malloc failed");
}
test_encoder_push(encoder,data);
bert_data_destroy(data);
bert_encoder_destroy(encoder);
test_output();
return 0;
}
|
...
unsigned char output[OUTPUT_SIZE];
void test_output()
{
...
test_fail("bert_encoder_push did not add the SMALL_INT magic byte");
}
if (output[3] != EXPECTED_LENGTH)
{
test_fail("bert_encoder_push encoded %u as the atom length, expected %u",output[3],expected_length);
}
test_strings((const char *)(output+4),EXPECTED,expected_length);
}
int main()
{
bert_encoder_t *encoder = test_encoder(output,OUTPUT_SIZE);
bert_data_t *data;
if (!(data = bert_data_create_atom(EXPECTED)))
{
test_fail("malloc failed");
}
...
|
667e5c1daf0f5e6183361e8c6ff47b34bae3bea3
|
src/uart.c
|
src/uart.c
|
void uart_putchar(char c, FILE *stream) {
if( c == '\n' )
uart_putchar( '\r', stream );
UDR0 = c;
loop_until_bit_is_set(UCSR0A, TXC0);
}
static FILE uart_out = FDEV_SETUP_STREAM(uart_putchar, NULL, _FDEV_SETUP_WRITE);
void uart_init(void) {
UBRR0L = BAUDRATE&0xFF;
UBRR0H = (BAUDRATE>>8);
#if USE_2X
UCSR0A |= _BV(U2X0);
#else
UCSR0A &= ~(_BV(U2X0));
#endif
UCSR0C = _BV(UCSZ01) | _BV(UCSZ00); /* 8-bit data */
UCSR0B = _BV(RXEN0) | _BV(TXEN0); /* Enable RX and TX */
stdout = &uart_out;
}
|
void uart_putchar(char c, FILE *stream) {
if( c == '\n' )
uart_putchar('\r', stream);
loop_until_bit_is_set(UCSR0A, UDRE0);
UDR0 = c;
}
static FILE uart_out = FDEV_SETUP_STREAM(uart_putchar, NULL, _FDEV_SETUP_WRITE);
void uart_init(void) {
UBRR0L = BAUDRATE&0xFF;
UBRR0H = (BAUDRATE>>8);
#if USE_2X
UCSR0A |= _BV(U2X0);
#else
UCSR0A &= ~(_BV(U2X0));
#endif
UCSR0C = _BV(UCSZ01) | _BV(UCSZ00); /* 8-bit data */
UCSR0B = _BV(RXEN0) | _BV(TXEN0); /* Enable RX and TX */
stdout = &uart_out;
}
|
Reorder UART transmission. Instead of waiting for cache to be empty and then sending the value, we first add the value to cache and then wait for transmission. This way no characters are lost during transmission
|
Reorder UART transmission. Instead of waiting for cache to be empty and then sending the value, we first add the value to cache and then wait for transmission. This way no characters are lost during transmission
|
C
|
mit
|
nathanhi/steep-beta,nathanhi/steep-beta
|
c
|
## Code Before:
void uart_putchar(char c, FILE *stream) {
if( c == '\n' )
uart_putchar( '\r', stream );
UDR0 = c;
loop_until_bit_is_set(UCSR0A, TXC0);
}
static FILE uart_out = FDEV_SETUP_STREAM(uart_putchar, NULL, _FDEV_SETUP_WRITE);
void uart_init(void) {
UBRR0L = BAUDRATE&0xFF;
UBRR0H = (BAUDRATE>>8);
#if USE_2X
UCSR0A |= _BV(U2X0);
#else
UCSR0A &= ~(_BV(U2X0));
#endif
UCSR0C = _BV(UCSZ01) | _BV(UCSZ00); /* 8-bit data */
UCSR0B = _BV(RXEN0) | _BV(TXEN0); /* Enable RX and TX */
stdout = &uart_out;
}
## Instruction:
Reorder UART transmission. Instead of waiting for cache to be empty and then sending the value, we first add the value to cache and then wait for transmission. This way no characters are lost during transmission
## Code After:
void uart_putchar(char c, FILE *stream) {
if( c == '\n' )
uart_putchar('\r', stream);
loop_until_bit_is_set(UCSR0A, UDRE0);
UDR0 = c;
}
static FILE uart_out = FDEV_SETUP_STREAM(uart_putchar, NULL, _FDEV_SETUP_WRITE);
void uart_init(void) {
UBRR0L = BAUDRATE&0xFF;
UBRR0H = (BAUDRATE>>8);
#if USE_2X
UCSR0A |= _BV(U2X0);
#else
UCSR0A &= ~(_BV(U2X0));
#endif
UCSR0C = _BV(UCSZ01) | _BV(UCSZ00); /* 8-bit data */
UCSR0B = _BV(RXEN0) | _BV(TXEN0); /* Enable RX and TX */
stdout = &uart_out;
}
|
// ... existing code ...
void uart_putchar(char c, FILE *stream) {
if( c == '\n' )
uart_putchar('\r', stream);
loop_until_bit_is_set(UCSR0A, UDRE0);
UDR0 = c;
}
static FILE uart_out = FDEV_SETUP_STREAM(uart_putchar, NULL, _FDEV_SETUP_WRITE);
// ... rest of the code ...
|
8c6940a82b4504786e221f0603b8995db41adcae
|
reddit2telegram/channels/r_wholesome/app.py
|
reddit2telegram/channels/r_wholesome/app.py
|
subreddit = 'wholesome'
t_channel = '@r_wholesome'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
subreddit = 'wholesome+WholesomeComics+wholesomegifs+wholesomepics+wholesomememes'
t_channel = '@r_wholesome'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
Add a few subreddits to @r_wholesome
|
Add a few subreddits to @r_wholesome
|
Python
|
mit
|
Fillll/reddit2telegram,Fillll/reddit2telegram
|
python
|
## Code Before:
subreddit = 'wholesome'
t_channel = '@r_wholesome'
def send_post(submission, r2t):
return r2t.send_simple(submission)
## Instruction:
Add a few subreddits to @r_wholesome
## Code After:
subreddit = 'wholesome+WholesomeComics+wholesomegifs+wholesomepics+wholesomememes'
t_channel = '@r_wholesome'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
...
subreddit = 'wholesome+WholesomeComics+wholesomegifs+wholesomepics+wholesomememes'
t_channel = '@r_wholesome'
...
|
8f3249904ede8e6ac4fd1398f3d059335a65c8c6
|
galpy/df.py
|
galpy/df.py
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
Add impulse functions to top level
|
Add impulse functions to top level
|
Python
|
bsd-3-clause
|
jobovy/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy
|
python
|
## Code Before:
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
## Instruction:
Add impulse functions to top level
## Code After:
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
# ... existing code ...
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
# ... rest of the code ...
|
eb79cce84fbb9d801d6f5087b9216e66d56bfa51
|
scripts/generate_global_kwargs_doc.py
|
scripts/generate_global_kwargs_doc.py
|
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
Include defaults in generated global args list.
|
Include defaults in generated global args list.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
python
|
## Code Before:
from os import path
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
## Instruction:
Include defaults in generated global args list.
## Code After:
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
lines = []
for category, kwarg_configs in OPERATION_KWARGS.items():
if category is None:
continue
lines.append('{0}:'.format(category))
for key, config in kwarg_configs.items():
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
module_filename = path.join(docs_dir, '_deploy_globals.rst')
print('--> Writing {0}'.format(module_filename))
out = '\n'.join(lines)
with open(module_filename, 'w') as outfile:
outfile.write(out)
if __name__ == '__main__':
print('### Building global kwargs doc')
build_global_kwargs_doc()
|
# ... existing code ...
from os import path
from pyinfra.api import Config
from pyinfra.api.operation_kwargs import OPERATION_KWARGS
def build_global_kwargs_doc():
pyinfra_config = Config()
this_dir = path.dirname(path.realpath(__file__))
docs_dir = path.abspath(path.join(this_dir, '..', 'docs'))
# ... modified code ...
description = config
if isinstance(config, dict):
description = config.get('description')
default = config.get('default')
if callable(default):
default = default(pyinfra_config)
if default:
key = '{0}={1}'.format(key, default)
lines.append(' + ``{0}``: {1}'.format(key, description))
# ... rest of the code ...
|
b4c249b8e09ca0fcc9a043ad6fafba0a29a62a38
|
src/main/java/org/nanopub/CustomTrigWriter.java
|
src/main/java/org/nanopub/CustomTrigWriter.java
|
package org.nanopub;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import org.openrdf.model.URI;
import org.openrdf.rio.trig.TriGWriter;
/**
* @author Tobias Kuhn
*/
public class CustomTrigWriter extends TriGWriter {
public CustomTrigWriter(OutputStream out) {
super(out);
}
public CustomTrigWriter(Writer writer) {
super(writer);
}
protected void writeURI(URI uri) throws IOException {
String prefix = namespaceTable.get(uri.toString());
if (prefix != null) {
writer.write(prefix);
writer.write(":");
} else {
super.writeURI(uri);
}
}
}
|
package org.nanopub;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import org.openrdf.model.URI;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.turtle.TurtleUtil;
/**
* @author Tobias Kuhn
*/
public class CustomTrigWriter extends TriGWriter {
public CustomTrigWriter(OutputStream out) {
super(out);
}
public CustomTrigWriter(Writer writer) {
super(writer);
}
@Override
protected void writeURI(URI uri) throws IOException {
String uriString = uri.toString();
String prefix = namespaceTable.get(uriString);
if (prefix != null) {
// Exact match: no suffix required
writer.write(prefix);
writer.write(":");
return;
}
prefix = null;
int splitIdxNorm = TurtleUtil.findURISplitIndex(uriString);
// Do also split at dots:
int splitIdxDot = uriString.lastIndexOf(".") + 1;
if (uriString.length() == splitIdxDot) splitIdxDot = -1;
int splitIdx = Math.max(splitIdxNorm, splitIdxDot);
if (splitIdx > 0) {
String namespace = uriString.substring(0, splitIdx);
prefix = namespaceTable.get(namespace);
}
if (prefix != null) {
// Namespace is mapped to a prefix; write abbreviated URI
writer.write(prefix);
writer.write(":");
writer.write(uriString.substring(splitIdx));
} else {
// Write full URI
writer.write("<");
writer.write(TurtleUtil.encodeURIString(uriString));
writer.write(">");
}
}
}
|
Make sure prefixes are split at '.' when writing TriG
|
Make sure prefixes are split at '.' when writing TriG
|
Java
|
mit
|
Nanopublication/nanopub-java,Nanopublication/nanopub-java
|
java
|
## Code Before:
package org.nanopub;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import org.openrdf.model.URI;
import org.openrdf.rio.trig.TriGWriter;
/**
* @author Tobias Kuhn
*/
public class CustomTrigWriter extends TriGWriter {
public CustomTrigWriter(OutputStream out) {
super(out);
}
public CustomTrigWriter(Writer writer) {
super(writer);
}
protected void writeURI(URI uri) throws IOException {
String prefix = namespaceTable.get(uri.toString());
if (prefix != null) {
writer.write(prefix);
writer.write(":");
} else {
super.writeURI(uri);
}
}
}
## Instruction:
Make sure prefixes are split at '.' when writing TriG
## Code After:
package org.nanopub;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import org.openrdf.model.URI;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.turtle.TurtleUtil;
/**
* @author Tobias Kuhn
*/
public class CustomTrigWriter extends TriGWriter {
public CustomTrigWriter(OutputStream out) {
super(out);
}
public CustomTrigWriter(Writer writer) {
super(writer);
}
@Override
protected void writeURI(URI uri) throws IOException {
String uriString = uri.toString();
String prefix = namespaceTable.get(uriString);
if (prefix != null) {
// Exact match: no suffix required
writer.write(prefix);
writer.write(":");
return;
}
prefix = null;
int splitIdxNorm = TurtleUtil.findURISplitIndex(uriString);
// Do also split at dots:
int splitIdxDot = uriString.lastIndexOf(".") + 1;
if (uriString.length() == splitIdxDot) splitIdxDot = -1;
int splitIdx = Math.max(splitIdxNorm, splitIdxDot);
if (splitIdx > 0) {
String namespace = uriString.substring(0, splitIdx);
prefix = namespaceTable.get(namespace);
}
if (prefix != null) {
// Namespace is mapped to a prefix; write abbreviated URI
writer.write(prefix);
writer.write(":");
writer.write(uriString.substring(splitIdx));
} else {
// Write full URI
writer.write("<");
writer.write(TurtleUtil.encodeURIString(uriString));
writer.write(">");
}
}
}
|
// ... existing code ...
import org.openrdf.model.URI;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.turtle.TurtleUtil;
/**
* @author Tobias Kuhn
// ... modified code ...
super(writer);
}
@Override
protected void writeURI(URI uri) throws IOException {
String uriString = uri.toString();
String prefix = namespaceTable.get(uriString);
if (prefix != null) {
// Exact match: no suffix required
writer.write(prefix);
writer.write(":");
return;
}
prefix = null;
int splitIdxNorm = TurtleUtil.findURISplitIndex(uriString);
// Do also split at dots:
int splitIdxDot = uriString.lastIndexOf(".") + 1;
if (uriString.length() == splitIdxDot) splitIdxDot = -1;
int splitIdx = Math.max(splitIdxNorm, splitIdxDot);
if (splitIdx > 0) {
String namespace = uriString.substring(0, splitIdx);
prefix = namespaceTable.get(namespace);
}
if (prefix != null) {
// Namespace is mapped to a prefix; write abbreviated URI
writer.write(prefix);
writer.write(":");
writer.write(uriString.substring(splitIdx));
} else {
// Write full URI
writer.write("<");
writer.write(TurtleUtil.encodeURIString(uriString));
writer.write(">");
}
}
// ... rest of the code ...
|
3fb1800548ad421520bf3f2845aad4f51f6f5839
|
rapidsms_multimodem/tests/__init__.py
|
rapidsms_multimodem/tests/__init__.py
|
from test_utils import * # noqa
from test_views import * # noqa
|
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
Add import for older versions of Django
|
Add import for older versions of Django
|
Python
|
bsd-3-clause
|
caktus/rapidsms-multimodem
|
python
|
## Code Before:
from test_utils import * # noqa
from test_views import * # noqa
## Instruction:
Add import for older versions of Django
## Code After:
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
// ... existing code ...
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
// ... rest of the code ...
|
c5167530d6b815d9538254ef03dd87c4f8497f97
|
org/postgresql/test/xa/XATestSuite.java
|
org/postgresql/test/xa/XATestSuite.java
|
package org.postgresql.test.xa;
import java.sql.Connection;
import junit.framework.TestSuite;
import org.postgresql.test.TestUtil;
public class XATestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
Class.forName("org.postgresql.Driver");
TestSuite suite = new TestSuite();
Connection connection = TestUtil.openDB();
try
{
if (TestUtil.haveMinimumServerVersion(connection, "8.1"))
{
suite.addTestSuite(XADataSourceTest.class);
}
}
finally
{
connection.close();
}
return suite;
}
}
|
package org.postgresql.test.xa;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import junit.framework.TestSuite;
import org.postgresql.test.TestUtil;
public class XATestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
Class.forName("org.postgresql.Driver");
TestSuite suite = new TestSuite();
Connection connection = TestUtil.openDB();
try
{
if (TestUtil.haveMinimumServerVersion(connection, "8.1"))
{
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("SHOW max_prepared_transactions");
rs.next();
int mpt = rs.getInt(1);
if (mpt > 0) {
suite.addTestSuite(XADataSourceTest.class);
} else {
System.out.println("Skipping XA tests because max_prepared_transactions = 0.");
}
rs.close();
stmt.close();
}
}
finally
{
connection.close();
}
return suite;
}
}
|
Disable the XA tests if the server doesn't allow any prepared transactions, which is the default starting with the 8.4 release.
|
Disable the XA tests if the server doesn't allow any prepared
transactions, which is the default starting with the 8.4 release.
|
Java
|
bsd-2-clause
|
sehrope/pgjdbc,golovnin/pgjdbc,marschall/pgjdbc,AlexElin/pgjdbc,amozhenin/pgjdbc,sehrope/pgjdbc,panchenko/pgjdbc,ekoontz/pgjdbc,jorsol/pgjdbc,jamesthomp/pgjdbc,jorsol/pgjdbc,schlosna/pgjdbc,pgjdbc/pgjdbc,jkutner/pgjdbc,jkutner/pgjdbc,thkoch2001/libpostgresql-jdbc-java,amozhenin/pgjdbc,thkoch2001/libpostgresql-jdbc-java,zemian/pgjdbc,jorsol/pgjdbc,whitingjr/pgjdbc,Gordiychuk/pgjdbc,lordnelson/pgjdbc,panchenko/pgjdbc,alexismeneses/pgjdbc,lordnelson/pgjdbc,jorsol/pgjdbc,Gordiychuk/pgjdbc,bocap/pgjdbc,davecramer/pgjdbc,underyx/pgjdbc,alexismeneses/pgjdbc,sehrope/pgjdbc,marschall/pgjdbc,underyx/pgjdbc,rjmac/pgjdbc,davecramer/pgjdbc,golovnin/pgjdbc,rjmac/pgjdbc,jamesthomp/pgjdbc,thkoch2001/libpostgresql-jdbc-java,marschall/pgjdbc,panchenko/pgjdbc,Gordiychuk/pgjdbc,amozhenin/pgjdbc,tivv/pgjdbc,zemian/pgjdbc,zapov/pgjdbc,AlexElin/pgjdbc,zapov/pgjdbc,jkutner/pgjdbc,AlexElin/pgjdbc,golovnin/pgjdbc,ekoontz/pgjdbc,pgjdbc/pgjdbc,whitingjr/pgjdbc,zapov/pgjdbc,sehrope/pgjdbc,phillipross/pgjdbc,phillipross/pgjdbc,pgjdbc/pgjdbc,davecramer/pgjdbc,underyx/pgjdbc,davecramer/pgjdbc,jamesthomp/pgjdbc,marschall/pgjdbc,lordnelson/pgjdbc,tivv/pgjdbc,bocap/pgjdbc,zemian/pgjdbc,schlosna/pgjdbc,lonnyj/pgjdbc,pgjdbc/pgjdbc,rjmac/pgjdbc,whitingjr/pgjdbc,lonnyj/pgjdbc,bocap/pgjdbc,phillipross/pgjdbc
|
java
|
## Code Before:
package org.postgresql.test.xa;
import java.sql.Connection;
import junit.framework.TestSuite;
import org.postgresql.test.TestUtil;
public class XATestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
Class.forName("org.postgresql.Driver");
TestSuite suite = new TestSuite();
Connection connection = TestUtil.openDB();
try
{
if (TestUtil.haveMinimumServerVersion(connection, "8.1"))
{
suite.addTestSuite(XADataSourceTest.class);
}
}
finally
{
connection.close();
}
return suite;
}
}
## Instruction:
Disable the XA tests if the server doesn't allow any prepared
transactions, which is the default starting with the 8.4 release.
## Code After:
package org.postgresql.test.xa;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import junit.framework.TestSuite;
import org.postgresql.test.TestUtil;
public class XATestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
Class.forName("org.postgresql.Driver");
TestSuite suite = new TestSuite();
Connection connection = TestUtil.openDB();
try
{
if (TestUtil.haveMinimumServerVersion(connection, "8.1"))
{
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("SHOW max_prepared_transactions");
rs.next();
int mpt = rs.getInt(1);
if (mpt > 0) {
suite.addTestSuite(XADataSourceTest.class);
} else {
System.out.println("Skipping XA tests because max_prepared_transactions = 0.");
}
rs.close();
stmt.close();
}
}
finally
{
connection.close();
}
return suite;
}
}
|
// ... existing code ...
package org.postgresql.test.xa;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import junit.framework.TestSuite;
// ... modified code ...
{
if (TestUtil.haveMinimumServerVersion(connection, "8.1"))
{
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("SHOW max_prepared_transactions");
rs.next();
int mpt = rs.getInt(1);
if (mpt > 0) {
suite.addTestSuite(XADataSourceTest.class);
} else {
System.out.println("Skipping XA tests because max_prepared_transactions = 0.");
}
rs.close();
stmt.close();
}
}
finally
// ... rest of the code ...
|
03671a01cb5ea359c22e954a8381bbfd30bce094
|
lc560_subarray_sum_equals_k.py
|
lc560_subarray_sum_equals_k.py
|
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
|
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
Complete naive solution by nested for loops
|
Complete naive solution by nested for loops
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
python
|
## Code Before:
class Solution(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
pass
def main():
import time
nums = [1,1,1]
k = 2
if __name__ == '__main__':
main()
## Instruction:
Complete naive solution by nested for loops
## Code After:
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
main()
|
...
class SolutionNaive(object):
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
Time complexity: O(n^3).
Space complexity: O(n).
"""
count = 0
for i in range(len(nums)):
for j in range(i, len(nums)):
if sum(nums[i:(j + 1)]) == k:
count += 1
return count
def main():
import time
nums = [1, 1, 1]
k = 2
print SolutionNaive().subarraySum(nums, k)
nums = [10, 2, -2, -20, 10]
k = -10
print SolutionNaive().subarraySum(nums, k)
if __name__ == '__main__':
...
|
9da303e48820e95e1bfd206f1c0372f896dac6ec
|
draftjs_exporter/constants.py
|
draftjs_exporter/constants.py
|
from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
def __init__(self, tuple_list):
self.tuple_list = tuple_list
def __getattr__(self, name):
if name not in self.tuple_list:
raise AttributeError("'Enum' has no attribute '{}'".format(name))
return name
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
ENTITY_TYPES = Enum(('LINK', 'IMAGE', 'TOKEN'))
INLINE_STYLES = Enum(('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE'))
|
from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
def __init__(self, *elements):
self.elements = tuple(elements)
def __getattr__(self, name):
if name not in self.elements:
raise AttributeError("'Enum' has no attribute '{}'".format(name))
return name
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
ENTITY_TYPES = Enum('LINK', 'IMAGE', 'TOKEN')
INLINE_STYLES = Enum('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE')
|
Allow enum to be created more easily
|
Allow enum to be created more easily
|
Python
|
mit
|
springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter
|
python
|
## Code Before:
from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
def __init__(self, tuple_list):
self.tuple_list = tuple_list
def __getattr__(self, name):
if name not in self.tuple_list:
raise AttributeError("'Enum' has no attribute '{}'".format(name))
return name
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
ENTITY_TYPES = Enum(('LINK', 'IMAGE', 'TOKEN'))
INLINE_STYLES = Enum(('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE'))
## Instruction:
Allow enum to be created more easily
## Code After:
from __future__ import absolute_import, unicode_literals
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
def __init__(self, *elements):
self.elements = tuple(elements)
def __getattr__(self, name):
if name not in self.elements:
raise AttributeError("'Enum' has no attribute '{}'".format(name))
return name
# https://github.com/draft-js-utils/draft-js-utils/blob/master/src/Constants.js
class BLOCK_TYPES:
UNSTYLED = 'unstyled'
HEADER_ONE = 'header-one'
HEADER_TWO = 'header-two'
HEADER_THREE = 'header-three'
HEADER_FOUR = 'header-four'
HEADER_FIVE = 'header-five'
HEADER_SIX = 'header-six'
UNORDERED_LIST_ITEM = 'unordered-list-item'
ORDERED_LIST_ITEM = 'ordered-list-item'
BLOCKQUOTE = 'blockquote'
PULLQUOTE = 'pullquote'
CODE = 'code-block'
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
ENTITY_TYPES = Enum('LINK', 'IMAGE', 'TOKEN')
INLINE_STYLES = Enum('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE')
|
# ... existing code ...
# http://stackoverflow.com/a/22723724/1798491
class Enum(object):
def __init__(self, *elements):
self.elements = tuple(elements)
def __getattr__(self, name):
if name not in self.elements:
raise AttributeError("'Enum' has no attribute '{}'".format(name))
return name
# ... modified code ...
ATOMIC = 'atomic'
HORIZONTAL_RULE = 'horizontal-rule'
ENTITY_TYPES = Enum('LINK', 'IMAGE', 'TOKEN')
INLINE_STYLES = Enum('BOLD', 'CODE', 'ITALIC', 'STRIKETHROUGH', 'UNDERLINE')
# ... rest of the code ...
|
41cf41f501b715902cf180b5a2f62ce16a816f30
|
oscar/core/prices.py
|
oscar/core/prices.py
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
def __repr__(self):
if self.is_tax_known:
return "%s(currency=%r, excl_tax=%r, incl_tax=%r, tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax,
self.incl_tax, self.tax)
return "%s(currency=%r, excl_tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax)
|
Define __repr__ for the core Price class
|
Define __repr__ for the core Price class
|
Python
|
bsd-3-clause
|
saadatqadri/django-oscar,WillisXChen/django-oscar,adamend/django-oscar,sasha0/django-oscar,faratro/django-oscar,bnprk/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj.com,WillisXChen/django-oscar,WadeYuChen/django-oscar,taedori81/django-oscar,taedori81/django-oscar,bschuon/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,pasqualguerrero/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,bschuon/django-oscar,QLGu/django-oscar,sonofatailor/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,binarydud/django-oscar,marcoantoniooliveira/labweb,nfletton/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,josesanch/django-oscar,nfletton/django-oscar,Bogh/django-oscar,Bogh/django-oscar,john-parton/django-oscar,sasha0/django-oscar,binarydud/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,QLGu/django-oscar,josesanch/django-oscar,dongguangming/django-oscar,solarissmoke/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,eddiep1101/django-oscar,okfish/django-oscar,faratro/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,vovanbo/django-oscar,dongguangming/django-oscar,john-parton/django-oscar,thechampanurag/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,bnprk/django-oscar,ka7eh/django-oscar,machtfit/django-oscar,manevant/django-oscar,saadatqadri/django-oscar,jinnykoo/wuyisj.com,mexeniz/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,monikasulik/django-oscar,faratro/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,kapt/django
-oscar,binarydud/django-oscar,Bogh/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,nickpack/django-oscar,MatthewWilkes/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,kapari/django-oscar,nickpack/django-oscar,Bogh/django-oscar,taedori81/django-oscar,jinnykoo/christmas,okfish/django-oscar,manevant/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,itbabu/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,jlmadurga/django-oscar,john-parton/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,nfletton/django-oscar,vovanbo/django-oscar,Jannes123/django-oscar,spartonia/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,mexeniz/django-oscar,rocopartners/django-oscar,manevant/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,eddiep1101/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj,itbabu/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,bschuon/django-oscar,jinnykoo/christmas,adamend/django-oscar,marcoantoniooliveira/labweb,amirrpp/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,DrOctogon/unwash_ecom,amirrpp/django-oscar,kapt/django-oscar,ahmetdaglarbas/e-commerce,marcoantoniooliveira/labweb,ademuk/django-oscar,jlmadurga/django-oscar,manevant/django-oscar,pasqualguerrero/django-oscar,pasqualguerrero/django-oscar,anentropic/django-oscar,jmt4/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,thechampanurag/django-oscar,ahmetdaglarbas/e-commerce,okfish/django-oscar,pdonadeo/django-oscar,sasha0/django-oscar,spartonia/django-oscar,adamend/django-oscar,lijoantony/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-
oscar,solarissmoke/django-oscar,faratro/django-oscar,mexeniz/django-oscar,spartonia/django-oscar,sasha0/django-oscar,josesanch/django-oscar,machtfit/django-oscar,vovanbo/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,django-oscar/django-oscar,nickpack/django-oscar,adamend/django-oscar,rocopartners/django-oscar,pasqualguerrero/django-oscar,ademuk/django-oscar,QLGu/django-oscar,amirrpp/django-oscar,ahmetdaglarbas/e-commerce,bnprk/django-oscar,Jannes123/django-oscar
|
python
|
## Code Before:
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
## Instruction:
Define __repr__ for the core Price class
## Code After:
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
def __repr__(self):
if self.is_tax_known:
return "%s(currency=%r, excl_tax=%r, incl_tax=%r, tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax,
self.incl_tax, self.tax)
return "%s(currency=%r, excl_tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax)
|
// ... existing code ...
self.tax = tax
else:
self.is_tax_known = False
def __repr__(self):
if self.is_tax_known:
return "%s(currency=%r, excl_tax=%r, incl_tax=%r, tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax,
self.incl_tax, self.tax)
return "%s(currency=%r, excl_tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax)
// ... rest of the code ...
|
0260e50ab4d1449fa95b8e712861b7e44ac21965
|
umessages/appsettings.py
|
umessages/appsettings.py
|
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
Use bootstrap template pack by default
|
Use bootstrap template pack by default
|
Python
|
bsd-3-clause
|
euanlau/django-umessages,euanlau/django-umessages
|
python
|
## Code Before:
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
## Instruction:
Use bootstrap template pack by default
## Code After:
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
# ... existing code ...
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
# ... rest of the code ...
|
8496ba409b9a340858e4473157aab87593868db7
|
pytask/views.py
|
pytask/views.py
|
from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
Use the right name for the profile role's values.
|
Use the right name for the profile role's values.
|
Python
|
agpl-3.0
|
madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask
|
python
|
## Code Before:
from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
## Instruction:
Use the right name for the profile role's values.
## Code After:
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
// ... existing code ...
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
// ... modified code ...
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
// ... rest of the code ...
|
aa2916ff583a12e068251d36d0045c68e5895a5c
|
mudlib/mud/home/System/sys/extinguishd.c
|
mudlib/mud/home/System/sys/extinguishd.c
|
inherit SECOND_AUTO;
void extinguish(string path)
{
ACCESS_CHECK(KADMIN());
call_out("purge", 0, path, status(ST_OTABSIZE));
}
static void purge(string path, int quota)
{
int limit;
limit = 200;
if (quota % limit != 0) {
limit = quota % limit;
}
for (; quota > 0 && limit > 0; quota--, limit--) {
object obj;
if (obj = find_object(path + "#" + quota)) {
destruct_object(obj);
}
}
LOGD->post_message("test", LOG_INFO, quota + " objects to check.");
if (quota > 0) {
call_out("purge", 0, path, quota);
}
}
|
inherit SECOND_AUTO;
void extinguish(string path)
{
if (!KADMIN()) {
string opath;
opath = object_name(previous_object());
ACCESS_CHECK(DRIVER->creator(opath));
ACCESS_CHECK(DRIVER->creator(opath) == DRIVER->creator(path));
}
call_out("purge", 0, path, status(ST_OTABSIZE));
}
static void purge(string path, int quota)
{
int limit;
limit = 128;
if (quota % limit != 0) {
limit = quota % limit;
}
for (; quota > 0 && limit > 0; quota--, limit--) {
object obj;
if (obj = find_object(path + "#" + quota)) {
destruct_object(obj);
}
}
LOGD->post_message("test", LOG_INFO, quota + " objects to check.");
if (quota > 0) {
call_out("purge", 0, path, quota);
}
}
|
Allow extinguishing of objects with the same creator as yourself.
|
Allow extinguishing of objects with the same creator as yourself.
|
C
|
agpl-3.0
|
shentino/kotaka,shentino/kotaka,shentino/kotaka
|
c
|
## Code Before:
inherit SECOND_AUTO;
void extinguish(string path)
{
ACCESS_CHECK(KADMIN());
call_out("purge", 0, path, status(ST_OTABSIZE));
}
static void purge(string path, int quota)
{
int limit;
limit = 200;
if (quota % limit != 0) {
limit = quota % limit;
}
for (; quota > 0 && limit > 0; quota--, limit--) {
object obj;
if (obj = find_object(path + "#" + quota)) {
destruct_object(obj);
}
}
LOGD->post_message("test", LOG_INFO, quota + " objects to check.");
if (quota > 0) {
call_out("purge", 0, path, quota);
}
}
## Instruction:
Allow extinguishing of objects with the same creator as yourself.
## Code After:
inherit SECOND_AUTO;
void extinguish(string path)
{
if (!KADMIN()) {
string opath;
opath = object_name(previous_object());
ACCESS_CHECK(DRIVER->creator(opath));
ACCESS_CHECK(DRIVER->creator(opath) == DRIVER->creator(path));
}
call_out("purge", 0, path, status(ST_OTABSIZE));
}
static void purge(string path, int quota)
{
int limit;
limit = 128;
if (quota % limit != 0) {
limit = quota % limit;
}
for (; quota > 0 && limit > 0; quota--, limit--) {
object obj;
if (obj = find_object(path + "#" + quota)) {
destruct_object(obj);
}
}
LOGD->post_message("test", LOG_INFO, quota + " objects to check.");
if (quota > 0) {
call_out("purge", 0, path, quota);
}
}
|
# ... existing code ...
void extinguish(string path)
{
if (!KADMIN()) {
string opath;
opath = object_name(previous_object());
ACCESS_CHECK(DRIVER->creator(opath));
ACCESS_CHECK(DRIVER->creator(opath) == DRIVER->creator(path));
}
call_out("purge", 0, path, status(ST_OTABSIZE));
}
# ... modified code ...
{
int limit;
limit = 128;
if (quota % limit != 0) {
limit = quota % limit;
# ... rest of the code ...
|
ddb3bcf4e5d5eb5dc4f8bb74313f333e54c385d6
|
scripts/wall_stop.py
|
scripts/wall_stop.py
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
Reduce the name of a function
|
Reduce the name of a function
|
Python
|
mit
|
citueda/pimouse_run_corridor,citueda/pimouse_run_corridor
|
python
|
## Code Before:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
## Instruction:
Reduce the name of a function
## Code After:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
...
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
...
|
5b1a80efaa3f754f9f99413441dc13e1ac55a1b2
|
server/src/test/java/tests/ConfigurationAssert.java
|
server/src/test/java/tests/ConfigurationAssert.java
|
package tests;
import static org.fest.assertions.Assertions.assertThat;
import org.fest.assertions.GenericAssert;
import org.kercoin.magrit.Configuration;
import org.kercoin.magrit.Configuration.Authentication;
public class ConfigurationAssert extends GenericAssert<ConfigurationAssert, Configuration> {
ConfigurationAssert(Class<ConfigurationAssert> selfType,
Configuration actual) {
super(selfType, actual);
}
public ConfigurationAssert onPort(int expected) {
assertThat(actual.getSshPort()).isEqualTo(expected);
return this;
}
public ConfigurationAssert hasHomeDir(String absolutePath) {
assertThat(actual.getRepositoriesHomeDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasWorkDir(String absolutePath) {
assertThat(actual.getWorkHomeDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasPublickeyDir(String absolutePath) {
assertThat(actual.getPublickeyRepositoryDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasAuthentication(Authentication expected) {
assertThat(actual.getAuthentication()).isEqualTo(expected);
return this;
}
public ConfigurationAssert isRemoteAllowed(boolean expected) {
assertThat(actual.isRemoteAllowed()).isEqualTo(expected);
return this;
}
}
|
package tests;
import static org.fest.assertions.Assertions.assertThat;
import java.io.File;
import org.fest.assertions.GenericAssert;
import org.kercoin.magrit.Configuration;
import org.kercoin.magrit.Configuration.Authentication;
public class ConfigurationAssert extends GenericAssert<ConfigurationAssert, Configuration> {
ConfigurationAssert(Class<ConfigurationAssert> selfType,
Configuration actual) {
super(selfType, actual);
}
public ConfigurationAssert onPort(int expected) {
assertThat(actual.getSshPort()).isEqualTo(expected);
return this;
}
public ConfigurationAssert hasHomeDir(String absolutePath) {
assertThat(actual.getRepositoriesHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasWorkDir(String absolutePath) {
assertThat(actual.getWorkHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasPublickeyDir(String absolutePath) {
assertThat(actual.getPublickeyRepositoryDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasAuthentication(Authentication expected) {
assertThat(actual.getAuthentication()).isEqualTo(expected);
return this;
}
public ConfigurationAssert isRemoteAllowed(boolean expected) {
assertThat(actual.isRemoteAllowed()).isEqualTo(expected);
return this;
}
private static String cleanPath(String absolutePath) {
return new File(absolutePath).getPath();
}
}
|
Clean paths containing double /
|
Clean paths containing double /
|
Java
|
agpl-3.0
|
ptitfred/magrit,ptitfred/magrit
|
java
|
## Code Before:
package tests;
import static org.fest.assertions.Assertions.assertThat;
import org.fest.assertions.GenericAssert;
import org.kercoin.magrit.Configuration;
import org.kercoin.magrit.Configuration.Authentication;
public class ConfigurationAssert extends GenericAssert<ConfigurationAssert, Configuration> {
ConfigurationAssert(Class<ConfigurationAssert> selfType,
Configuration actual) {
super(selfType, actual);
}
public ConfigurationAssert onPort(int expected) {
assertThat(actual.getSshPort()).isEqualTo(expected);
return this;
}
public ConfigurationAssert hasHomeDir(String absolutePath) {
assertThat(actual.getRepositoriesHomeDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasWorkDir(String absolutePath) {
assertThat(actual.getWorkHomeDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasPublickeyDir(String absolutePath) {
assertThat(actual.getPublickeyRepositoryDir().getAbsolutePath()).isEqualTo(absolutePath);
return this;
}
public ConfigurationAssert hasAuthentication(Authentication expected) {
assertThat(actual.getAuthentication()).isEqualTo(expected);
return this;
}
public ConfigurationAssert isRemoteAllowed(boolean expected) {
assertThat(actual.isRemoteAllowed()).isEqualTo(expected);
return this;
}
}
## Instruction:
Clean paths containing double /
## Code After:
package tests;
import static org.fest.assertions.Assertions.assertThat;
import java.io.File;
import org.fest.assertions.GenericAssert;
import org.kercoin.magrit.Configuration;
import org.kercoin.magrit.Configuration.Authentication;
public class ConfigurationAssert extends GenericAssert<ConfigurationAssert, Configuration> {
ConfigurationAssert(Class<ConfigurationAssert> selfType,
Configuration actual) {
super(selfType, actual);
}
public ConfigurationAssert onPort(int expected) {
assertThat(actual.getSshPort()).isEqualTo(expected);
return this;
}
public ConfigurationAssert hasHomeDir(String absolutePath) {
assertThat(actual.getRepositoriesHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasWorkDir(String absolutePath) {
assertThat(actual.getWorkHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasPublickeyDir(String absolutePath) {
assertThat(actual.getPublickeyRepositoryDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasAuthentication(Authentication expected) {
assertThat(actual.getAuthentication()).isEqualTo(expected);
return this;
}
public ConfigurationAssert isRemoteAllowed(boolean expected) {
assertThat(actual.isRemoteAllowed()).isEqualTo(expected);
return this;
}
private static String cleanPath(String absolutePath) {
return new File(absolutePath).getPath();
}
}
|
...
package tests;
import static org.fest.assertions.Assertions.assertThat;
import java.io.File;
import org.fest.assertions.GenericAssert;
import org.kercoin.magrit.Configuration;
...
}
public ConfigurationAssert hasHomeDir(String absolutePath) {
assertThat(actual.getRepositoriesHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasWorkDir(String absolutePath) {
assertThat(actual.getWorkHomeDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
public ConfigurationAssert hasPublickeyDir(String absolutePath) {
assertThat(actual.getPublickeyRepositoryDir().getAbsolutePath()).isEqualTo(cleanPath(absolutePath));
return this;
}
...
return this;
}
private static String cleanPath(String absolutePath) {
return new File(absolutePath).getPath();
}
}
...
|
3dda5003b3ce345a08369b15fc3447d2a4c7d1ad
|
examples/plotting_2d.py
|
examples/plotting_2d.py
|
from bluesky.examples import *
from bluesky.standard_config import RE
from matplotlib import pyplot as plt
from xray_vision.backend.mpl.cross_section_2d import CrossSection
import numpy as np
import filestore.api as fsapi
import time as ttime
from filestore.handlers import NpyHandler
fsapi.register_handler('npy', NpyHandler)
def stepscan(motor, det):
for i in np.linspace(-5, 5, 75):
yield Msg('create')
yield Msg('set', motor, i)
yield Msg('trigger', det)
yield Msg('read', motor)
yield Msg('read', det)
yield Msg('save')
ic = LiveImage('det_2d')
table_callback = LiveTable(fields=[motor._name, det_2d._name])
RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
|
from bluesky.examples import *
from bluesky.tests.utils import setup_test_run_engine
from matplotlib import pyplot as plt
from xray_vision.backend.mpl.cross_section_2d import CrossSection
import numpy as np
import filestore.api as fsapi
import time as ttime
from filestore.handlers import NpyHandler
fsapi.register_handler('npy', NpyHandler)
def stepscan(motor, det):
for i in np.linspace(-5, 5, 75):
yield Msg('create')
yield Msg('set', motor, i)
yield Msg('trigger', det)
yield Msg('read', motor)
yield Msg('read', det)
yield Msg('save')
ic = LiveImage('det_2d')
table_callback = LiveTable(fields=[motor._name, det_2d._name])
RE = setup_test_run_engine()
RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
|
Set up RunEngine with required metadata.
|
FIX: Set up RunEngine with required metadata.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,sameera2004/bluesky,sameera2004/bluesky,klauer/bluesky,klauer/bluesky,dchabot/bluesky,ericdill/bluesky,dchabot/bluesky
|
python
|
## Code Before:
from bluesky.examples import *
from bluesky.standard_config import RE
from matplotlib import pyplot as plt
from xray_vision.backend.mpl.cross_section_2d import CrossSection
import numpy as np
import filestore.api as fsapi
import time as ttime
from filestore.handlers import NpyHandler
fsapi.register_handler('npy', NpyHandler)
def stepscan(motor, det):
for i in np.linspace(-5, 5, 75):
yield Msg('create')
yield Msg('set', motor, i)
yield Msg('trigger', det)
yield Msg('read', motor)
yield Msg('read', det)
yield Msg('save')
ic = LiveImage('det_2d')
table_callback = LiveTable(fields=[motor._name, det_2d._name])
RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
## Instruction:
FIX: Set up RunEngine with required metadata.
## Code After:
from bluesky.examples import *
from bluesky.tests.utils import setup_test_run_engine
from matplotlib import pyplot as plt
from xray_vision.backend.mpl.cross_section_2d import CrossSection
import numpy as np
import filestore.api as fsapi
import time as ttime
from filestore.handlers import NpyHandler
fsapi.register_handler('npy', NpyHandler)
def stepscan(motor, det):
for i in np.linspace(-5, 5, 75):
yield Msg('create')
yield Msg('set', motor, i)
yield Msg('trigger', det)
yield Msg('read', motor)
yield Msg('read', det)
yield Msg('save')
ic = LiveImage('det_2d')
table_callback = LiveTable(fields=[motor._name, det_2d._name])
RE = setup_test_run_engine()
RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
|
# ... existing code ...
from bluesky.examples import *
from bluesky.tests.utils import setup_test_run_engine
from matplotlib import pyplot as plt
from xray_vision.backend.mpl.cross_section_2d import CrossSection
import numpy as np
# ... modified code ...
ic = LiveImage('det_2d')
table_callback = LiveTable(fields=[motor._name, det_2d._name])
RE = setup_test_run_engine()
RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
# ... rest of the code ...
|
05e19922a5a0f7268ce1a34e25e5deb8e9a2f5d3
|
sfmtools.py
|
sfmtools.py
|
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
|
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
if pathname[-1:] is not '/':
pathname = ''.join(pathname, '/')
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
|
Check for trailing slash in path
|
Check for trailing slash in path
|
Python
|
mit
|
rmsare/sfmtools
|
python
|
## Code Before:
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
## Instruction:
Check for trailing slash in path
## Code After:
""" Utility functions for PhotoScan processing """
import os, sys
import PhotoScan
def align_and_clean_photos(chunk):
ncameras = len(chunk.cameras)
for frame in chunk.frames:
frame.matchPhotos()
chunk.alignCameras()
for camera in chunk.cameras:
if camera.transform is None:
chunk.remove(camera)
naligned = len(chunk.cameras)
print('%d/%d cameras aligned' % (naligned, ncameras))
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
if pathname[-1:] is not '/':
pathname = ''.join(pathname, '/')
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
for chunk in PhotoScan.app.document.chunks:
filename = ''.join([pathname, chunk.label.split(' '), '.', formatstring])
exported = chunk.exportDem(filename, format=formatstring, dx=resolution, dy=resolution)
if not exported:
print('Export failed:', chunk.label)
nexported -= 1
print('%d/%d DEMs exported' % (nexported, nchunks))
|
...
def export_dems(resolution, formatstring, pathname)
if not os.path.isdir(pathname):
os.mkdir(pathname)
if pathname[-1:] is not '/':
pathname = ''.join(pathname, '/')
nchunks = len(PhotoScan.app.document.chunks)
nexported = nchunks
...
|
0e866db1377e4c58ef05d66583cea6e35071ba20
|
nnpy/errors.py
|
nnpy/errors.py
|
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super().__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
|
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super(NNError, self).__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
|
Fix incorrect args to super
|
Fix incorrect args to super
|
Python
|
mit
|
nanomsg/nnpy
|
python
|
## Code Before:
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super().__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
## Instruction:
Fix incorrect args to super
## Code After:
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super(NNError, self).__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
|
// ... existing code ...
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super(NNError, self).__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
// ... rest of the code ...
|
51346025b638159c69fe2c8da85170784d065d60
|
test_passgen.py
|
test_passgen.py
|
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
|
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
|
Add unit test for valid blacklist
|
Add unit test for valid blacklist
|
Python
|
mit
|
Videonauth/passgen
|
python
|
## Code Before:
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
## Instruction:
Add unit test for valid blacklist
## Code After:
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
|
# ... existing code ...
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
# ... rest of the code ...
|
9cad7f5836ea9b98ef6d2ea44e2c3df71e9780e0
|
src/test/java/com/google/sps/TripsServletTest.java
|
src/test/java/com/google/sps/TripsServletTest.java
|
package com.google.sps;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.sps.data.Trip;
import com.google.sps.servlets.TripsServlet;
import com.google.sps.util.TripDataConverter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TripsServletTest {
}
|
package com.google.sps;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.sps.data.Trip;
import com.google.sps.servlets.TripsServlet;
import com.google.sps.util.TripDataConverter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TripsServletTest {
private LocalServiceTestHelper helper;
@Before
public void setUp() {
helper = new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig());
}
@After
public void tearDown() {
helper.tearDown();
}
@Test
public void testUnauthenticatedUser() throws IOException {
helper.setEnvIsLoggedIn(false);
helper.setUp();
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
PrintWriter pw = new PrintWriter(new StringWriter());
when(response.getWriter()).thenReturn(pw);
TripsServlet servlet = new TripsServlet();
servlet.doGet(request, response);
servlet.doPost(request, response);
servlet.doPut(request, response);
servlet.doDelete(request, response);
verify(response, times(4)).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@Test
public void testDoGet() {
helper.setEnvIsLoggedIn(true);
helper.setEnvEmail("[email protected]");
helper.setEnvAuthDomain("google.com");
helper.setUp();
}
}
|
Add test for unauthenticated user all 4 HTTP methods in TripsServlet
|
Add test for unauthenticated user all 4 HTTP methods in TripsServlet
|
Java
|
apache-2.0
|
googleinterns/step27-2020,googleinterns/step27-2020,googleinterns/step27-2020
|
java
|
## Code Before:
package com.google.sps;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.sps.data.Trip;
import com.google.sps.servlets.TripsServlet;
import com.google.sps.util.TripDataConverter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TripsServletTest {
}
## Instruction:
Add test for unauthenticated user all 4 HTTP methods in TripsServlet
## Code After:
package com.google.sps;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.sps.data.Trip;
import com.google.sps.servlets.TripsServlet;
import com.google.sps.util.TripDataConverter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TripsServletTest {
private LocalServiceTestHelper helper;
@Before
public void setUp() {
helper = new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig());
}
@After
public void tearDown() {
helper.tearDown();
}
@Test
public void testUnauthenticatedUser() throws IOException {
helper.setEnvIsLoggedIn(false);
helper.setUp();
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
PrintWriter pw = new PrintWriter(new StringWriter());
when(response.getWriter()).thenReturn(pw);
TripsServlet servlet = new TripsServlet();
servlet.doGet(request, response);
servlet.doPost(request, response);
servlet.doPut(request, response);
servlet.doDelete(request, response);
verify(response, times(4)).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@Test
public void testDoGet() {
helper.setEnvIsLoggedIn(true);
helper.setEnvEmail("[email protected]");
helper.setEnvAuthDomain("google.com");
helper.setUp();
}
}
|
// ... existing code ...
package com.google.sps;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
// ... modified code ...
@RunWith(JUnit4.class)
public class TripsServletTest {
private LocalServiceTestHelper helper;
@Before
public void setUp() {
helper = new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig());
}
@After
public void tearDown() {
helper.tearDown();
}
@Test
public void testUnauthenticatedUser() throws IOException {
helper.setEnvIsLoggedIn(false);
helper.setUp();
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
PrintWriter pw = new PrintWriter(new StringWriter());
when(response.getWriter()).thenReturn(pw);
TripsServlet servlet = new TripsServlet();
servlet.doGet(request, response);
servlet.doPost(request, response);
servlet.doPut(request, response);
servlet.doDelete(request, response);
verify(response, times(4)).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@Test
public void testDoGet() {
helper.setEnvIsLoggedIn(true);
helper.setEnvEmail("[email protected]");
helper.setEnvAuthDomain("google.com");
helper.setUp();
}
}
// ... rest of the code ...
|
eb5294f0df32442dbd7431fd9200388ca4c63d62
|
tests/builtins/test_reversed.py
|
tests/builtins/test_reversed.py
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ReversedTests(TranspileTestCase):
pass
class BuiltinReversedFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["reversed"]
not_implemented = [
'test_range',
]
|
from .. utils import SAMPLE_DATA, TranspileTestCase, BuiltinFunctionTestCase
def _iterate_test(datatype):
def test_func(self):
code = '\n'.join([
'\nfor x in {value}:\n print(x)\n'.format(value=value)
for value in SAMPLE_DATA[datatype]
])
self.assertCodeExecution(code)
return test_func
class ReversedTests(TranspileTestCase):
# test_iterate_bytearray = _iterate_test('bytearray')
test_iterate_bytes = _iterate_test('bytes')
test_iterate_list = _iterate_test('list')
test_iterate_range = _iterate_test('range')
test_iterate_str = _iterate_test('str')
test_iterate_tuple = _iterate_test('tuple')
class BuiltinReversedFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["reversed"]
not_implemented = [
'test_range',
]
|
Add iteration tests for reversed type
|
Add iteration tests for reversed type
|
Python
|
bsd-3-clause
|
cflee/voc,cflee/voc,freakboy3742/voc,freakboy3742/voc
|
python
|
## Code Before:
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ReversedTests(TranspileTestCase):
pass
class BuiltinReversedFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["reversed"]
not_implemented = [
'test_range',
]
## Instruction:
Add iteration tests for reversed type
## Code After:
from .. utils import SAMPLE_DATA, TranspileTestCase, BuiltinFunctionTestCase
def _iterate_test(datatype):
def test_func(self):
code = '\n'.join([
'\nfor x in {value}:\n print(x)\n'.format(value=value)
for value in SAMPLE_DATA[datatype]
])
self.assertCodeExecution(code)
return test_func
class ReversedTests(TranspileTestCase):
# test_iterate_bytearray = _iterate_test('bytearray')
test_iterate_bytes = _iterate_test('bytes')
test_iterate_list = _iterate_test('list')
test_iterate_range = _iterate_test('range')
test_iterate_str = _iterate_test('str')
test_iterate_tuple = _iterate_test('tuple')
class BuiltinReversedFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["reversed"]
not_implemented = [
'test_range',
]
|
// ... existing code ...
from .. utils import SAMPLE_DATA, TranspileTestCase, BuiltinFunctionTestCase
def _iterate_test(datatype):
def test_func(self):
code = '\n'.join([
'\nfor x in {value}:\n print(x)\n'.format(value=value)
for value in SAMPLE_DATA[datatype]
])
self.assertCodeExecution(code)
return test_func
class ReversedTests(TranspileTestCase):
# test_iterate_bytearray = _iterate_test('bytearray')
test_iterate_bytes = _iterate_test('bytes')
test_iterate_list = _iterate_test('list')
test_iterate_range = _iterate_test('range')
test_iterate_str = _iterate_test('str')
test_iterate_tuple = _iterate_test('tuple')
class BuiltinReversedFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
// ... rest of the code ...
|
8267f2db09363d762434142d4c97d17915fd0483
|
rxappfocus-sample/src/main/java/com/example/rxappfocus/App.java
|
rxappfocus-sample/src/main/java/com/example/rxappfocus/App.java
|
package com.example.rxappfocus;
import android.app.Application;
import android.widget.Toast;
import com.gramboid.rxappfocus.AppFocusProvider;
import rx.functions.Action1;
public class App extends Application {
private AppFocusProvider focusProvider;
@Override
public void onCreate() {
super.onCreate();
focusProvider = new AppFocusProvider(this);
focusProvider
.getAppFocus()
.subscribe(new Action1<Boolean>() {
@Override
public void call(Boolean visible) {
Toast.makeText(App.this, visible ? "App visible" : "App hidden", Toast.LENGTH_SHORT).show();
}
});
}
public AppFocusProvider getFocusProvider() {
return focusProvider;
}
}
|
package com.example.rxappfocus;
import android.app.Application;
import android.widget.Toast;
import com.gramboid.rxappfocus.AppFocusProvider;
import rx.functions.Action1;
public class App extends Application {
private AppFocusProvider focusProvider;
@Override
public void onCreate() {
super.onCreate();
focusProvider = new AppFocusProvider(this);
// show a toast every time the app becomes visible or hidden
focusProvider.getAppFocus()
.subscribe(new Action1<Boolean>() {
@Override
public void call(Boolean visible) {
Toast.makeText(App.this, visible ? "App visible" : "App hidden", Toast.LENGTH_SHORT).show();
}
});
}
public AppFocusProvider getFocusProvider() {
return focusProvider;
}
}
|
Tidy up and add comment
|
Tidy up and add comment
|
Java
|
apache-2.0
|
gramboid/RxAppFocus
|
java
|
## Code Before:
package com.example.rxappfocus;
import android.app.Application;
import android.widget.Toast;
import com.gramboid.rxappfocus.AppFocusProvider;
import rx.functions.Action1;
public class App extends Application {
private AppFocusProvider focusProvider;
@Override
public void onCreate() {
super.onCreate();
focusProvider = new AppFocusProvider(this);
focusProvider
.getAppFocus()
.subscribe(new Action1<Boolean>() {
@Override
public void call(Boolean visible) {
Toast.makeText(App.this, visible ? "App visible" : "App hidden", Toast.LENGTH_SHORT).show();
}
});
}
public AppFocusProvider getFocusProvider() {
return focusProvider;
}
}
## Instruction:
Tidy up and add comment
## Code After:
package com.example.rxappfocus;
import android.app.Application;
import android.widget.Toast;
import com.gramboid.rxappfocus.AppFocusProvider;
import rx.functions.Action1;
public class App extends Application {
private AppFocusProvider focusProvider;
@Override
public void onCreate() {
super.onCreate();
focusProvider = new AppFocusProvider(this);
// show a toast every time the app becomes visible or hidden
focusProvider.getAppFocus()
.subscribe(new Action1<Boolean>() {
@Override
public void call(Boolean visible) {
Toast.makeText(App.this, visible ? "App visible" : "App hidden", Toast.LENGTH_SHORT).show();
}
});
}
public AppFocusProvider getFocusProvider() {
return focusProvider;
}
}
|
# ... existing code ...
public void onCreate() {
super.onCreate();
focusProvider = new AppFocusProvider(this);
// show a toast every time the app becomes visible or hidden
focusProvider.getAppFocus()
.subscribe(new Action1<Boolean>() {
@Override
public void call(Boolean visible) {
# ... rest of the code ...
|
86e6cb5e32b9698fad734da9ce4c2be8dea586e0
|
resolverapi/__init__.py
|
resolverapi/__init__.py
|
import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
|
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
Create root page for api.openresolve.com
|
Create root page for api.openresolve.com
|
Python
|
bsd-2-clause
|
opendns/OpenResolve
|
python
|
## Code Before:
import os
from flask import Flask
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Health check. No data returned. Just 200."""
return '', 200
return app
## Instruction:
Create root page for api.openresolve.com
## Code After:
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
dns_resolver = Resolver()
def create_app(config_name):
app = Flask(__name__)
if config_name == 'dev':
app.config.from_object('resolverapi.config.DevelopmentConfig')
else:
app.config.from_object('resolverapi.config.BaseConfig')
# Get nameservers from environment variable or default to OpenDNS resolvers
if os.environ.get('RESOLVERS'):
app.config['RESOLVERS'] = [addr.strip() for addr in os.environ.get('RESOLVERS').split(',')]
# Respond with Access-Control-Allow-Origin headers. Use * to accept all
if os.environ.get('CORS_ORIGIN'):
CORS(app, origins=os.environ.get('CORS_ORIGIN'))
dns_resolver.lifetime = 3.0
from resolverapi.endpoints import ReverseLookup
from resolverapi.endpoints import LookupRecordType
api = Api(app)
api.add_resource(ReverseLookup, '/reverse/<ip>')
api.add_resource(LookupRecordType, '/<rdtype>/<domain>')
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
|
# ... existing code ...
import os
from flask import Flask, jsonify
from flask_restful import Api
from dns.resolver import Resolver
from flask_cors import CORS
# ... modified code ...
@app.route('/')
def root():
"""Provide user a link to the main page. Also this route acts as a health check, returns 200."""
return jsonify({'message': "Check out www.openresolve.com for usage."}), 200
return app
# ... rest of the code ...
|
501eb4ee71e47d90c155072b15d8ad840ff01098
|
voting/management/commands/send_vote_invitation_emails.py
|
voting/management/commands/send_vote_invitation_emails.py
|
import datetime
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from project import settings
from voting.models import VoteToken
class Command(BaseCommand):
def handle(self, *args, **options):
voting_enabled = settings.VOTING_ENABLED
if not voting_enabled:
print 'Voting is disabled'
return
vote_tokens = VoteToken.objects.filter(token_sent__isnull=True).select_related('user')
txt_template = get_template('voting/email/vote_invite.txt')
html_template = get_template('voting/email/vote_invite.html')
for vote_token in vote_tokens:
context = Context({'token': vote_token})
txt = txt_template.render(context)
html = html_template.render(context)
msg = EmailMultiAlternatives(
'Community voting open',
txt,
'WebCamp Zagreb <[email protected]>',
[vote_token.user.email],
)
msg.attach_alternative(html, "text/html")
msg.send()
vote_token.token_sent = datetime.datetime.now()
vote_token.save()
|
import datetime
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from project import settings
from voting.models import VoteToken
class Command(BaseCommand):
def handle(self, *args, **options):
voting_enabled = settings.VOTING_ENABLED
if not voting_enabled:
print 'Voting is disabled'
return
vote_tokens = VoteToken.objects.filter(token_sent__isnull=True).select_related('user')
txt_template = get_template('voting/email/vote_invite.txt')
html_template = get_template('voting/email/vote_invite.html')
for vote_token in vote_tokens:
context = Context({'token': vote_token})
txt = txt_template.render(context)
html = html_template.render(context)
msg = EmailMultiAlternatives(
'Community voting open',
txt,
'WebCamp Zagreb <[email protected]>',
[vote_token.user.email],
)
msg.attach_alternative(html, "text/html")
msg.send()
print "Voting email sent to %r" % vote_token.user.email
vote_token.token_sent = datetime.datetime.now()
vote_token.save()
|
Add print statement to send invite command
|
Add print statement to send invite command
|
Python
|
bsd-3-clause
|
WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web
|
python
|
## Code Before:
import datetime
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from project import settings
from voting.models import VoteToken
class Command(BaseCommand):
def handle(self, *args, **options):
voting_enabled = settings.VOTING_ENABLED
if not voting_enabled:
print 'Voting is disabled'
return
vote_tokens = VoteToken.objects.filter(token_sent__isnull=True).select_related('user')
txt_template = get_template('voting/email/vote_invite.txt')
html_template = get_template('voting/email/vote_invite.html')
for vote_token in vote_tokens:
context = Context({'token': vote_token})
txt = txt_template.render(context)
html = html_template.render(context)
msg = EmailMultiAlternatives(
'Community voting open',
txt,
'WebCamp Zagreb <[email protected]>',
[vote_token.user.email],
)
msg.attach_alternative(html, "text/html")
msg.send()
vote_token.token_sent = datetime.datetime.now()
vote_token.save()
## Instruction:
Add print statement to send invite command
## Code After:
import datetime
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from project import settings
from voting.models import VoteToken
class Command(BaseCommand):
def handle(self, *args, **options):
voting_enabled = settings.VOTING_ENABLED
if not voting_enabled:
print 'Voting is disabled'
return
vote_tokens = VoteToken.objects.filter(token_sent__isnull=True).select_related('user')
txt_template = get_template('voting/email/vote_invite.txt')
html_template = get_template('voting/email/vote_invite.html')
for vote_token in vote_tokens:
context = Context({'token': vote_token})
txt = txt_template.render(context)
html = html_template.render(context)
msg = EmailMultiAlternatives(
'Community voting open',
txt,
'WebCamp Zagreb <[email protected]>',
[vote_token.user.email],
)
msg.attach_alternative(html, "text/html")
msg.send()
print "Voting email sent to %r" % vote_token.user.email
vote_token.token_sent = datetime.datetime.now()
vote_token.save()
|
...
msg.attach_alternative(html, "text/html")
msg.send()
print "Voting email sent to %r" % vote_token.user.email
vote_token.token_sent = datetime.datetime.now()
vote_token.save()
...
|
3039b00e761f02eb0586dad51049377a31329491
|
reggae/reflect.py
|
reggae/reflect.py
|
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [m.__file__ for m in finder.modules.values()]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
Use absolute paths for dependencies
|
Use absolute paths for dependencies
|
Python
|
bsd-3-clause
|
atilaneves/reggae-python
|
python
|
## Code Before:
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [m.__file__ for m in finder.modules.values()]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
## Instruction:
Use absolute paths for dependencies
## Code After:
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
...
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
...
|
e395d32770c2a4f7a2e4cab98d0a459e690ffeba
|
zeus/api/schemas/job.py
|
zeus/api/schemas/job.py
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
Add updated_at to Job schema
|
feat: Add updated_at to Job schema
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
python
|
## Code Before:
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
## Instruction:
feat: Add updated_at to Job schema
## Code After:
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
// ... existing code ...
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
// ... rest of the code ...
|
691aa16d4a8cb2836fcd62b7376ffdcbb8df75cd
|
src/main/java/io/tus/java/client/ProtocolException.java
|
src/main/java/io/tus/java/client/ProtocolException.java
|
package io.tus.java.client;
import java.io.IOException;
import java.net.HttpURLConnection;
/**
* This exception is thrown if the server sends a request with an unexpected status code or
* missing/invalid headers.
*/
public class ProtocolException extends Exception {
private HttpURLConnection connection;
public ProtocolException(String message) {
super(message);
}
public ProtocolException(String message, HttpURLConnection connection) {
super(message);
this.connection = connection;
}
public HttpURLConnection getCausingConnection() {
return connection;
}
public boolean shouldRetry() {
if(connection == null) {
return false;
}
try {
int responseCode = connection.getResponseCode();
return responseCode >= 500 && responseCode < 600 || responseCode == 423;
} catch(IOException e) {
return false;
}
}
}
|
package io.tus.java.client;
import java.io.IOException;
import java.net.HttpURLConnection;
/**
* This exception is thrown if the server sends a request with an unexpected status code or
* missing/invalid headers.
*/
public class ProtocolException extends Exception {
private HttpURLConnection connection;
public ProtocolException(String message) {
super(message);
}
public ProtocolException(String message, HttpURLConnection connection) {
super(message);
this.connection = connection;
}
public HttpURLConnection getCausingConnection() {
return connection;
}
public boolean shouldRetry() {
if(connection == null) {
return false;
}
try {
int responseCode = connection.getResponseCode();
// 5XX and 423 Resource Locked status codes should be retried.
return (responseCode >= 500 && responseCode < 600) || responseCode == 423;
} catch(IOException e) {
return false;
}
}
}
|
Add explaination to selection of retrying status codes
|
Add explaination to selection of retrying status codes
|
Java
|
mit
|
tus/tus-java-client
|
java
|
## Code Before:
package io.tus.java.client;
import java.io.IOException;
import java.net.HttpURLConnection;
/**
* This exception is thrown if the server sends a request with an unexpected status code or
* missing/invalid headers.
*/
public class ProtocolException extends Exception {
private HttpURLConnection connection;
public ProtocolException(String message) {
super(message);
}
public ProtocolException(String message, HttpURLConnection connection) {
super(message);
this.connection = connection;
}
public HttpURLConnection getCausingConnection() {
return connection;
}
public boolean shouldRetry() {
if(connection == null) {
return false;
}
try {
int responseCode = connection.getResponseCode();
return responseCode >= 500 && responseCode < 600 || responseCode == 423;
} catch(IOException e) {
return false;
}
}
}
## Instruction:
Add explaination to selection of retrying status codes
## Code After:
package io.tus.java.client;
import java.io.IOException;
import java.net.HttpURLConnection;
/**
* This exception is thrown if the server sends a request with an unexpected status code or
* missing/invalid headers.
*/
public class ProtocolException extends Exception {
private HttpURLConnection connection;
public ProtocolException(String message) {
super(message);
}
public ProtocolException(String message, HttpURLConnection connection) {
super(message);
this.connection = connection;
}
public HttpURLConnection getCausingConnection() {
return connection;
}
public boolean shouldRetry() {
if(connection == null) {
return false;
}
try {
int responseCode = connection.getResponseCode();
// 5XX and 423 Resource Locked status codes should be retried.
return (responseCode >= 500 && responseCode < 600) || responseCode == 423;
} catch(IOException e) {
return false;
}
}
}
|
# ... existing code ...
try {
int responseCode = connection.getResponseCode();
// 5XX and 423 Resource Locked status codes should be retried.
return (responseCode >= 500 && responseCode < 600) || responseCode == 423;
} catch(IOException e) {
return false;
}
# ... rest of the code ...
|
35f20097b6dfff413db20f11edb1969d8fa9490e
|
src/main/java/org/amc/servlet/listener/APLSessionListener.java
|
src/main/java/org/amc/servlet/listener/APLSessionListener.java
|
package org.amc.servlet.listener;
/**
*
* @author Adrian Mclaughlin
* @version 1
*/
import org.apache.log4j.Logger;
import javax.servlet.ServletContext;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
import com.sun.security.auth.UserPrincipal;
@WebListener
public class APLSessionListener implements HttpSessionListener
{
private static Logger logger=Logger.getLogger(APLSessionListener.class);
public static int count=0;
@Override
public void sessionCreated(HttpSessionEvent arg0)
{
synchronized(this)
{
count++;
String address="";
logger.info("(Create) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
@Override
public void sessionDestroyed(HttpSessionEvent arg0)
{
synchronized(this)
{
if(count>0)
{
count--;
}
logger.info("(Destroy) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
private void updateSerlvetContext(HttpSessionEvent arg0)
{
synchronized(arg0.getSession().getServletContext())
{
ServletContext context=arg0.getSession().getServletContext();
context.setAttribute("session_count", count);
}
}
}
|
package org.amc.servlet.listener;
/**
*
* @author Adrian Mclaughlin
* @version 1
*/
import org.apache.log4j.Logger;
import javax.servlet.ServletContext;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
@WebListener
public class APLSessionListener implements HttpSessionListener
{
private static Logger logger=Logger.getLogger(APLSessionListener.class);
public static int count=0;
@Override
public void sessionCreated(HttpSessionEvent arg0)
{
synchronized(this)
{
count++;
String address="";
logger.info("(Create) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
@Override
public void sessionDestroyed(HttpSessionEvent arg0)
{
synchronized(this)
{
if(count>0)
{
count--;
}
logger.info("(Destroy) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
private void updateSerlvetContext(HttpSessionEvent arg0)
{
synchronized(arg0.getSession().getServletContext())
{
ServletContext context=arg0.getSession().getServletContext();
context.setAttribute("session_count", count);
}
}
}
|
Remove unnecessary and incorrect import
|
Remove unnecessary and incorrect import
|
Java
|
mit
|
subwoofer359/aplsystem,subwoofer359/aplsystem,subwoofer359/aplsystem,subwoofer359/aplsystem
|
java
|
## Code Before:
package org.amc.servlet.listener;
/**
*
* @author Adrian Mclaughlin
* @version 1
*/
import org.apache.log4j.Logger;
import javax.servlet.ServletContext;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
import com.sun.security.auth.UserPrincipal;
@WebListener
public class APLSessionListener implements HttpSessionListener
{
private static Logger logger=Logger.getLogger(APLSessionListener.class);
public static int count=0;
@Override
public void sessionCreated(HttpSessionEvent arg0)
{
synchronized(this)
{
count++;
String address="";
logger.info("(Create) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
@Override
public void sessionDestroyed(HttpSessionEvent arg0)
{
synchronized(this)
{
if(count>0)
{
count--;
}
logger.info("(Destroy) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
private void updateSerlvetContext(HttpSessionEvent arg0)
{
synchronized(arg0.getSession().getServletContext())
{
ServletContext context=arg0.getSession().getServletContext();
context.setAttribute("session_count", count);
}
}
}
## Instruction:
Remove unnecessary and incorrect import
## Code After:
package org.amc.servlet.listener;
/**
*
* @author Adrian Mclaughlin
* @version 1
*/
import org.apache.log4j.Logger;
import javax.servlet.ServletContext;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
@WebListener
public class APLSessionListener implements HttpSessionListener
{
private static Logger logger=Logger.getLogger(APLSessionListener.class);
public static int count=0;
@Override
public void sessionCreated(HttpSessionEvent arg0)
{
synchronized(this)
{
count++;
String address="";
logger.info("(Create) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
@Override
public void sessionDestroyed(HttpSessionEvent arg0)
{
synchronized(this)
{
if(count>0)
{
count--;
}
logger.info("(Destroy) There are "+count+" sessions");
updateSerlvetContext(arg0);
}
}
private void updateSerlvetContext(HttpSessionEvent arg0)
{
synchronized(arg0.getSession().getServletContext())
{
ServletContext context=arg0.getSession().getServletContext();
context.setAttribute("session_count", count);
}
}
}
|
# ... existing code ...
import javax.servlet.ServletContext;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
@WebListener
public class APLSessionListener implements HttpSessionListener
# ... rest of the code ...
|
29da36393653eeb844dceda201553e3bc9963da1
|
services/pvlogger/src/xal/service/pvlogger/Main.java
|
services/pvlogger/src/xal/service/pvlogger/Main.java
|
/*
* Main.java
*
* Created on Wed Jan 14 13:03:12 EST 2004
*
* Copyright (c) 2004 Spallation Neutron Source
* Oak Ridge National Laboratory
* Oak Ridge, TN 37830
*/
package xal.service.pvlogger;
import xal.extension.service.ServiceDirectory;
/**
* Main
*
* @author tap
*/
public class Main {
protected LoggerModel model;
/** Main Constructor */
public Main() {
model = new LoggerModel();
}
/**
* run the service by starting the logger
*/
protected void run() {
model.startLogging();
new LoggerService(model);
}
/**
* Main entry point to the service. Run the service.
* @param args The launch arguments to the service.
*/
static public void main(String[] args) {
new Main().run();
}
}
|
/*
* Main.java
*
* Created on Wed Jan 14 13:03:12 EST 2004
*
* Copyright (c) 2004 Spallation Neutron Source
* Oak Ridge National Laboratory
* Oak Ridge, TN 37830
*/
package xal.service.pvlogger;
import xal.extension.service.ServiceDirectory;
/**
* Main
*
* @author tap
*/
public class Main {
protected LoggerModel model;
/** Main Constructor */
public Main() {
model = new LoggerModel();
}
/**
* run the service by starting the logger
*/
protected void run() {
// get flag to test whether logging should be periodic and on-demand (default) or only on-demand
final boolean periodicLogging = !Boolean.getBoolean( "xal.logging.noperiod" );
if ( periodicLogging ) {
model.startLogging();
}
else {
System.out.println( "Warning! Periodic logging has been disabled due to command line flag. Will log on demand only." );
}
new LoggerService(model);
}
/**
* Main entry point to the service. Run the service.
* @param args The launch arguments to the service.
*/
static public void main(String[] args) {
new Main().run();
}
}
|
Add a flag to the PV Logger service to disable periodic logging and only allow on-demand logging.
|
Add a flag to the PV Logger service to disable periodic logging and only allow on-demand logging.
|
Java
|
bsd-3-clause
|
EuropeanSpallationSource/openxal,openxal/openxal,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,openxal/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,openxal/openxal,luxiaohan/openxal-csns-luxh,luxiaohan/openxal-csns-luxh
|
java
|
## Code Before:
/*
* Main.java
*
* Created on Wed Jan 14 13:03:12 EST 2004
*
* Copyright (c) 2004 Spallation Neutron Source
* Oak Ridge National Laboratory
* Oak Ridge, TN 37830
*/
package xal.service.pvlogger;
import xal.extension.service.ServiceDirectory;
/**
* Main
*
* @author tap
*/
public class Main {
protected LoggerModel model;
/** Main Constructor */
public Main() {
model = new LoggerModel();
}
/**
* run the service by starting the logger
*/
protected void run() {
model.startLogging();
new LoggerService(model);
}
/**
* Main entry point to the service. Run the service.
* @param args The launch arguments to the service.
*/
static public void main(String[] args) {
new Main().run();
}
}
## Instruction:
Add a flag to the PV Logger service to disable periodic logging and only allow on-demand logging.
## Code After:
/*
* Main.java
*
* Created on Wed Jan 14 13:03:12 EST 2004
*
* Copyright (c) 2004 Spallation Neutron Source
* Oak Ridge National Laboratory
* Oak Ridge, TN 37830
*/
package xal.service.pvlogger;
import xal.extension.service.ServiceDirectory;
/**
* Main
*
* @author tap
*/
public class Main {
protected LoggerModel model;
/** Main Constructor */
public Main() {
model = new LoggerModel();
}
/**
* run the service by starting the logger
*/
protected void run() {
// get flag to test whether logging should be periodic and on-demand (default) or only on-demand
final boolean periodicLogging = !Boolean.getBoolean( "xal.logging.noperiod" );
if ( periodicLogging ) {
model.startLogging();
}
else {
System.out.println( "Warning! Periodic logging has been disabled due to command line flag. Will log on demand only." );
}
new LoggerService(model);
}
/**
* Main entry point to the service. Run the service.
* @param args The launch arguments to the service.
*/
static public void main(String[] args) {
new Main().run();
}
}
|
// ... existing code ...
* run the service by starting the logger
*/
protected void run() {
// get flag to test whether logging should be periodic and on-demand (default) or only on-demand
final boolean periodicLogging = !Boolean.getBoolean( "xal.logging.noperiod" );
if ( periodicLogging ) {
model.startLogging();
}
else {
System.out.println( "Warning! Periodic logging has been disabled due to command line flag. Will log on demand only." );
}
new LoggerService(model);
}
// ... rest of the code ...
|
71e97918c471b1cb88ed57148d21881222a4964f
|
rider/src/main/kotlin/com/jetbrains/rider/plugins/unity/asmdef/AsmDefJsonSchemeProviderFactory.kt
|
rider/src/main/kotlin/com/jetbrains/rider/plugins/unity/asmdef/AsmDefJsonSchemeProviderFactory.kt
|
package com.jetbrains.rider.plugins.unity.asmdef
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.VirtualFile
import com.jetbrains.jsonSchema.extension.JsonSchemaFileProvider
import com.jetbrains.jsonSchema.extension.JsonSchemaProviderFactory
import com.jetbrains.jsonSchema.extension.SchemaType
import com.jetbrains.rider.plugins.unity.ideaInterop.fileTypes.asmdef.AsmDefFileType
class AsmDefJsonSchemeProviderFactory : JsonSchemaProviderFactory {
override fun getProviders(p0: Project): MutableList<JsonSchemaFileProvider> {
return mutableListOf(
object : JsonSchemaFileProvider {
private val schemaFile = JsonSchemaProviderFactory.getResourceFile(this::class.java, "/schemas/unity/asmdef.json")
override fun isAvailable(file: VirtualFile) = file.fileType == AsmDefFileType
override fun getName() = "Unity Assembly Definition"
override fun getSchemaFile() = schemaFile
override fun getSchemaType() = SchemaType.embeddedSchema
})
}
}
|
package com.jetbrains.rider.plugins.unity.asmdef
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.VirtualFile
import com.jetbrains.jsonSchema.extension.JsonSchemaFileProvider
import com.jetbrains.jsonSchema.extension.JsonSchemaProviderFactory
import com.jetbrains.jsonSchema.extension.SchemaType
import com.jetbrains.rider.plugins.unity.ideaInterop.fileTypes.asmdef.AsmDefFileType
class AsmDefJsonSchemeProviderFactory : JsonSchemaProviderFactory {
override fun getProviders(p0: Project): MutableList<JsonSchemaFileProvider> {
return mutableListOf(
object : JsonSchemaFileProvider {
private val schemaFile = JsonSchemaProviderFactory.getResourceFile(this::class.java, "/schemas/unity/asmdef.json")
override fun isAvailable(file: VirtualFile) = file.fileType == AsmDefFileType
override fun getName() = "Unity Assembly Definition"
override fun getSchemaFile() = schemaFile
override fun getSchemaType() = SchemaType.embeddedSchema
override fun getRemoteSource() = "https://json.schemastore.org/asmdef.json"
})
}
}
|
Add link to asmdef schema on schemastore
|
Add link to asmdef schema on schemastore
|
Kotlin
|
apache-2.0
|
JetBrains/resharper-unity,JetBrains/resharper-unity,JetBrains/resharper-unity
|
kotlin
|
## Code Before:
package com.jetbrains.rider.plugins.unity.asmdef
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.VirtualFile
import com.jetbrains.jsonSchema.extension.JsonSchemaFileProvider
import com.jetbrains.jsonSchema.extension.JsonSchemaProviderFactory
import com.jetbrains.jsonSchema.extension.SchemaType
import com.jetbrains.rider.plugins.unity.ideaInterop.fileTypes.asmdef.AsmDefFileType
class AsmDefJsonSchemeProviderFactory : JsonSchemaProviderFactory {
override fun getProviders(p0: Project): MutableList<JsonSchemaFileProvider> {
return mutableListOf(
object : JsonSchemaFileProvider {
private val schemaFile = JsonSchemaProviderFactory.getResourceFile(this::class.java, "/schemas/unity/asmdef.json")
override fun isAvailable(file: VirtualFile) = file.fileType == AsmDefFileType
override fun getName() = "Unity Assembly Definition"
override fun getSchemaFile() = schemaFile
override fun getSchemaType() = SchemaType.embeddedSchema
})
}
}
## Instruction:
Add link to asmdef schema on schemastore
## Code After:
package com.jetbrains.rider.plugins.unity.asmdef
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.VirtualFile
import com.jetbrains.jsonSchema.extension.JsonSchemaFileProvider
import com.jetbrains.jsonSchema.extension.JsonSchemaProviderFactory
import com.jetbrains.jsonSchema.extension.SchemaType
import com.jetbrains.rider.plugins.unity.ideaInterop.fileTypes.asmdef.AsmDefFileType
class AsmDefJsonSchemeProviderFactory : JsonSchemaProviderFactory {
override fun getProviders(p0: Project): MutableList<JsonSchemaFileProvider> {
return mutableListOf(
object : JsonSchemaFileProvider {
private val schemaFile = JsonSchemaProviderFactory.getResourceFile(this::class.java, "/schemas/unity/asmdef.json")
override fun isAvailable(file: VirtualFile) = file.fileType == AsmDefFileType
override fun getName() = "Unity Assembly Definition"
override fun getSchemaFile() = schemaFile
override fun getSchemaType() = SchemaType.embeddedSchema
override fun getRemoteSource() = "https://json.schemastore.org/asmdef.json"
})
}
}
|
# ... existing code ...
override fun getName() = "Unity Assembly Definition"
override fun getSchemaFile() = schemaFile
override fun getSchemaType() = SchemaType.embeddedSchema
override fun getRemoteSource() = "https://json.schemastore.org/asmdef.json"
})
}
}
# ... rest of the code ...
|
b6d178b4dcb894180356c445b8cde644e7dc4327
|
Squirrel/SQRLShipItLauncher.h
|
Squirrel/SQRLShipItLauncher.h
|
//
// SQRLShipItLauncher.h
// Squirrel
//
// Created by Justin Spahr-Summers on 2013-08-12.
// Copyright (c) 2013 GitHub. All rights reserved.
//
#import <Foundation/Foundation.h>
// The domain for errors originating within SQRLShipItLauncher.
extern NSString * const SQRLShipItLauncherErrorDomain;
// The ShipIt service could not be started.
extern const NSInteger SQRLShipItLauncherErrorCouldNotStartService;
// Responsible for launching the ShipIt service to actually install an update.
@interface SQRLShipItLauncher : NSObject
// Attempts to launch the ShipIt service.
//
// error - If not NULL, set to any error that occurs.
//
// Returns the XPC connection established, or NULL if an error occurs. If an
// error occurs in the connection, it will be automatically released. Retain it
// if you'll still need it after that point.
- (xpc_connection_t)launch:(NSError **)error;
@end
|
//
// SQRLShipItLauncher.h
// Squirrel
//
// Created by Justin Spahr-Summers on 2013-08-12.
// Copyright (c) 2013 GitHub. All rights reserved.
//
#import <Foundation/Foundation.h>
// The domain for errors originating within SQRLShipItLauncher.
extern NSString * const SQRLShipItLauncherErrorDomain;
// The ShipIt service could not be started.
extern const NSInteger SQRLShipItLauncherErrorCouldNotStartService;
// Responsible for launching the ShipIt service to actually install an update.
@interface SQRLShipItLauncher : NSObject
// Attempts to launch the ShipIt service.
//
// error - If not NULL, set to any error that occurs.
//
// Returns the XPC connection established, or NULL if an error occurs. The
// connection will be automatically released once it has completed or received
// an error. Retain the connection if you'll still need it after that point.
- (xpc_connection_t)launch:(NSError **)error;
@end
|
Clarify XPC connection lifecycle for -launch:
|
Clarify XPC connection lifecycle for -launch:
|
C
|
mit
|
emiscience/Squirrel.Mac,EdZava/Squirrel.Mac,Squirrel/Squirrel.Mac,EdZava/Squirrel.Mac,Squirrel/Squirrel.Mac,emiscience/Squirrel.Mac,EdZava/Squirrel.Mac,emiscience/Squirrel.Mac,Squirrel/Squirrel.Mac
|
c
|
## Code Before:
//
// SQRLShipItLauncher.h
// Squirrel
//
// Created by Justin Spahr-Summers on 2013-08-12.
// Copyright (c) 2013 GitHub. All rights reserved.
//
#import <Foundation/Foundation.h>
// The domain for errors originating within SQRLShipItLauncher.
extern NSString * const SQRLShipItLauncherErrorDomain;
// The ShipIt service could not be started.
extern const NSInteger SQRLShipItLauncherErrorCouldNotStartService;
// Responsible for launching the ShipIt service to actually install an update.
@interface SQRLShipItLauncher : NSObject
// Attempts to launch the ShipIt service.
//
// error - If not NULL, set to any error that occurs.
//
// Returns the XPC connection established, or NULL if an error occurs. If an
// error occurs in the connection, it will be automatically released. Retain it
// if you'll still need it after that point.
- (xpc_connection_t)launch:(NSError **)error;
@end
## Instruction:
Clarify XPC connection lifecycle for -launch:
## Code After:
//
// SQRLShipItLauncher.h
// Squirrel
//
// Created by Justin Spahr-Summers on 2013-08-12.
// Copyright (c) 2013 GitHub. All rights reserved.
//
#import <Foundation/Foundation.h>
// The domain for errors originating within SQRLShipItLauncher.
extern NSString * const SQRLShipItLauncherErrorDomain;
// The ShipIt service could not be started.
extern const NSInteger SQRLShipItLauncherErrorCouldNotStartService;
// Responsible for launching the ShipIt service to actually install an update.
@interface SQRLShipItLauncher : NSObject
// Attempts to launch the ShipIt service.
//
// error - If not NULL, set to any error that occurs.
//
// Returns the XPC connection established, or NULL if an error occurs. The
// connection will be automatically released once it has completed or received
// an error. Retain the connection if you'll still need it after that point.
- (xpc_connection_t)launch:(NSError **)error;
@end
|
# ... existing code ...
//
// error - If not NULL, set to any error that occurs.
//
// Returns the XPC connection established, or NULL if an error occurs. The
// connection will be automatically released once it has completed or received
// an error. Retain the connection if you'll still need it after that point.
- (xpc_connection_t)launch:(NSError **)error;
@end
# ... rest of the code ...
|
50d447a546cd939594aeb8fda84167cef27f0d5e
|
msmbuilder/scripts/msmb.py
|
msmbuilder/scripts/msmb.py
|
"""Statistical models for biomolecular dynamics"""
from __future__ import print_function, absolute_import, division
import sys
from ..cmdline import App
from ..commands import *
from ..version import version
# the commands register themselves when they're imported
class MSMBuilderApp(App):
def _subcommands(self):
cmds = super(MSMBuilderApp, self)._subcommands()
# sort the commands in some arbitrary order.
return sorted(cmds, key=lambda e: ''.join(x.__name__ for x in e.mro()))
def main():
try:
app = MSMBuilderApp(name='MSMBuilder', description=__doc__)
app.start()
except RuntimeError as e:
sys.exit("Error: %s" % e)
except Exception as e:
message = """\
An unexpected error has occurred with MSMBuilder (version %s), please
consider sending the following traceback to MSMBuilder GitHub issue tracker at:
https://github.com/msmbuilder/msmbuilder/issues
"""
print(message % version, file=sys.stderr)
raise # as if we did not catch it
if __name__ == '__main__':
main()
|
"""Statistical models for biomolecular dynamics"""
from __future__ import print_function, absolute_import, division
import sys
from ..cmdline import App
from ..commands import *
from ..version import version
# the commands register themselves when they're imported
# Load external commands which register themselves
# with entry point msmbuilder.commands
from pkg_resources import iter_entry_points
for ep in iter_entry_points("msmbuilder.commands"):
external_command = ep.load()
# Some groups start with numbers for ordering
# Some start with descriptions e.g. "MSM"
# Let's set the group to start with ZZZ to put plugins last.
external_command._group = "ZZZ-External_" + external_command._group
class MSMBuilderApp(App):
pass
def main():
try:
app = MSMBuilderApp(name='MSMBuilder', description=__doc__)
app.start()
except RuntimeError as e:
sys.exit("Error: %s" % e)
except Exception as e:
message = """\
An unexpected error has occurred with MSMBuilder (version %s), please
consider sending the following traceback to MSMBuilder GitHub issue tracker at:
https://github.com/msmbuilder/msmbuilder/issues
"""
print(message % version, file=sys.stderr)
raise # as if we did not catch it
if __name__ == '__main__':
main()
|
Load plugins from entry point
|
Load plugins from entry point
|
Python
|
lgpl-2.1
|
brookehus/msmbuilder,stephenliu1989/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,dr-nate/msmbuilder,dotsdl/msmbuilder,peastman/msmbuilder,msultan/msmbuilder,mpharrigan/mixtape,stephenliu1989/msmbuilder,cxhernandez/msmbuilder,rmcgibbo/msmbuilder,cxhernandez/msmbuilder,msultan/msmbuilder,brookehus/msmbuilder,stephenliu1989/msmbuilder,msmbuilder/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,dr-nate/msmbuilder,Eigenstate/msmbuilder,brookehus/msmbuilder,dr-nate/msmbuilder,dotsdl/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,mpharrigan/mixtape,Eigenstate/msmbuilder,msmbuilder/msmbuilder,dotsdl/msmbuilder,rmcgibbo/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,rmcgibbo/msmbuilder,dotsdl/msmbuilder,stephenliu1989/msmbuilder,rmcgibbo/msmbuilder,mpharrigan/mixtape,msmbuilder/msmbuilder,msultan/msmbuilder,dr-nate/msmbuilder,dr-nate/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,peastman/msmbuilder,msmbuilder/msmbuilder,cxhernandez/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,msmbuilder/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,mpharrigan/mixtape,Eigenstate/msmbuilder
|
python
|
## Code Before:
"""Statistical models for biomolecular dynamics"""
from __future__ import print_function, absolute_import, division
import sys
from ..cmdline import App
from ..commands import *
from ..version import version
# the commands register themselves when they're imported
class MSMBuilderApp(App):
def _subcommands(self):
cmds = super(MSMBuilderApp, self)._subcommands()
# sort the commands in some arbitrary order.
return sorted(cmds, key=lambda e: ''.join(x.__name__ for x in e.mro()))
def main():
try:
app = MSMBuilderApp(name='MSMBuilder', description=__doc__)
app.start()
except RuntimeError as e:
sys.exit("Error: %s" % e)
except Exception as e:
message = """\
An unexpected error has occurred with MSMBuilder (version %s), please
consider sending the following traceback to MSMBuilder GitHub issue tracker at:
https://github.com/msmbuilder/msmbuilder/issues
"""
print(message % version, file=sys.stderr)
raise # as if we did not catch it
if __name__ == '__main__':
main()
## Instruction:
Load plugins from entry point
## Code After:
"""Statistical models for biomolecular dynamics"""
from __future__ import print_function, absolute_import, division
import sys
from ..cmdline import App
from ..commands import *
from ..version import version
# the commands register themselves when they're imported
# Load external commands which register themselves
# with entry point msmbuilder.commands
from pkg_resources import iter_entry_points
for ep in iter_entry_points("msmbuilder.commands"):
    external_command = ep.load()
    # Some groups start with numbers for ordering
    # Some start with descriptions e.g. "MSM"
    # Let's set the group to start with ZZZ to put plugins last.
    # NOTE(review): assumes every plugin command class defines a ``_group``
    # attribute -- confirm against the command base class.
    external_command._group = "ZZZ-External_" + external_command._group
class MSMBuilderApp(App):
    """Command-line application for MSMBuilder; all behavior inherited from App."""
    pass
def main():
    """Entry point for the ``msmb`` command-line tool.

    Expected, user-facing failures (``RuntimeError``) exit with a short
    message.  Any other exception prints a bug-report banner to stderr and
    then re-raises, so the original traceback is still shown to the user.
    """
    try:
        app = MSMBuilderApp(name='MSMBuilder', description=__doc__)
        app.start()
    except RuntimeError as e:
        # Known/validation errors: exit cleanly with just the message.
        sys.exit("Error: %s" % e)
    except Exception:
        # Unexpected error: point the user at the issue tracker, then
        # re-raise so the full traceback is printed below the banner.
        # (The bound exception variable was unused, so it is not captured.)
        message = """\
An unexpected error has occurred with MSMBuilder (version %s), please
consider sending the following traceback to MSMBuilder GitHub issue tracker at:
https://github.com/msmbuilder/msmbuilder/issues
"""
        print(message % version, file=sys.stderr)
        raise  # as if we did not catch it
if __name__ == '__main__':
main()
|
# ... existing code ...
from ..version import version
# the commands register themselves when they're imported
# Load external commands which register themselves
# with entry point msmbuilder.commands
from pkg_resources import iter_entry_points
for ep in iter_entry_points("msmbuilder.commands"):
external_command = ep.load()
# Some groups start with numbers for ordering
# Some start with descriptions e.g. "MSM"
# Let's set the group to start with ZZZ to put plugins last.
external_command._group = "ZZZ-External_" + external_command._group
class MSMBuilderApp(App):
pass
def main():
# ... modified code ...
print(message % version, file=sys.stderr)
raise # as if we did not catch it
if __name__ == '__main__':
main()
# ... rest of the code ...
|
b0c93651c6d0d48394041ea61dea3774f6e017af
|
ircnotifier/redis2irc.py
|
ircnotifier/redis2irc.py
|
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
|
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
Join channels where the bot isn't already on
|
Join channels where the bot isn't already on
|
Python
|
apache-2.0
|
wikimedia/operations-software-ircyall,yuvipanda/ircnotifier
|
python
|
## Code Before:
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
## Instruction:
Join channels where the bot isn't already on
## Code After:
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
    """IRC bot that relays messages popped from a Redis queue.

    Each queue entry is a JSON object with ``channels`` (a list of channel
    names) and ``message`` (the text to deliver to every listed channel).
    """

    def __init__(self, conf, **kwargs):
        """
        :type conf: dict
        """
        super(Redis2Irc, self).__init__(**kwargs)
        self._conf = conf
        # Channels this bot has already joined, so each channel is only
        # JOINed once across the bot's lifetime.
        self.joined_channels = set()

    @property
    def conf(self):
        # Read-only access to the configuration dict.
        return self._conf

    @asyncio.coroutine
    def start(self):
        # Supervise the Redis listener forever, restarting it after a crash.
        while True:
            try:
                yield from self.process_message()
            except Exception:
                self.log.critical(traceback.format_exc())
                self.log.info("...restarting Redis listener in a few seconds.")
                yield from asyncio.sleep(5)

    @asyncio.coroutine
    def process_message(self):
        # Create connection
        connection = yield from asyncio_redis.Connection.create(
            host=self.conf.get('REDIS_HOST', 'localhost'),
            port=6379,
        )
        while True:
            try:
                # blpop blocks until a payload arrives on the queue.
                future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
                message = json.loads(future.value)
                channels = set(message['channels'])
                message = message['message']
                # Join only channels we are not already on, and record them:
                # without updating joined_channels, the bot would re-JOIN
                # every channel on every single message.
                to_join = channels.difference(self.joined_channels)
                for chan in to_join:
                    self.join(chan)
                self.joined_channels.update(to_join)
                for chan in channels:
                    self.privmsg(chan, message)
            except Exception:
                # Narrowed from a bare ``except:`` so KeyboardInterrupt and
                # other BaseExceptions can still propagate.
                self.log.critical(traceback.format_exc())
                yield from asyncio.sleep(1)
|
# ... existing code ...
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
# ... modified code ...
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
# ... rest of the code ...
|
46ce6e626352d845a8b3b151f58837d8200a82e9
|
src/com/algorithms/tree/BST.java
|
src/com/algorithms/tree/BST.java
|
package com.algorithms.tree;
public class BST<Key extends Comparable<Key>, Value> {
}
|
package com.algorithms.tree;
public class BST<Key extends Comparable<Key>, Value> {
private Node root;
private class Node {
private Key key;
private Value val;
private Node left, right;
public Node(Key key, Value val) {
this.key = key;
this.val = val;
}
}
public void put(Key key, Value val) {}
public Value get(Key key) {}
public void delete(Key key) {}
public Iterable<Key> iterator() {}
}
|
Create Node class and other skeleton code
|
Create Node class and other skeleton code
|
Java
|
mit
|
SkullTech/algorithms-princeton
|
java
|
## Code Before:
package com.algorithms.tree;
public class BST<Key extends Comparable<Key>, Value> {
}
## Instruction:
Create Node class and other skeleton code
## Code After:
package com.algorithms.tree;
/**
 * Skeleton of a binary search tree mapping comparable keys to values.
 * All operations are stubs awaiting implementation.
 */
public class BST<Key extends Comparable<Key>, Value> {

    private Node root;  // root of the tree; null when the tree is empty

    /** Internal tree node holding one key/value pair and its two subtrees. */
    private class Node {
        private Key key;
        private Value val;
        private Node left, right;

        public Node(Key key, Value val) {
            this.key = key;
            this.val = val;
        }
    }

    /** Inserts the key/value pair into the tree. TODO: implement. */
    public void put(Key key, Value val) {}

    /**
     * Returns the value associated with {@code key}, or {@code null} if the
     * key is absent.
     */
    public Value get(Key key) {
        // TODO: implement search. A stub with an empty body would not
        // compile for a non-void return type, so return null for now.
        return null;
    }

    /** Removes {@code key} (and its value) from the tree. TODO: implement. */
    public void delete(Key key) {}

    /** Returns an iterable over the keys in the tree. TODO: implement. */
    public Iterable<Key> iterator() {
        // TODO: implement traversal; see get() for why a stub must return.
        return null;
    }
}
|
...
package com.algorithms.tree;
public class BST<Key extends Comparable<Key>, Value> {
private Node root;
private class Node {
private Key key;
private Value val;
private Node left, right;
public Node(Key key, Value val) {
this.key = key;
this.val = val;
}
}
public void put(Key key, Value val) {}
public Value get(Key key) {}
public void delete(Key key) {}
public Iterable<Key> iterator() {}
}
...
|
b6db7abfd59a1b97fbb4d1b867e3316c029c94ff
|
spec/Report_S06_spec.py
|
spec/Report_S06_spec.py
|
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
result = self.report[key].values
expect(result).to(equal(expected_result))
# result_filename = '{}_result.txt'.format(self.data_filename)
#
# with open(result_filename) as result_file:
# result_string = result_file.read()
# self.expected_result = literal_eval(result_string)
#
# result = self.report.values
#
# expect(result).to(equal(self.expected_result))
|
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
'spec/data/S06_with_error.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
warnings = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
result = []
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
for cnc in self.report[key].concentrators:
if cnc.meters:
for meter in cnc.meters:
for value in meter.values:
result.append(value)
warnings.append(meter.warnings)
print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
result, expected_result, warnings))
expect(result).to(equal(expected_result))
expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
"Meter(ZIV42553686). Thrown exception: "
"object of type 'NoneType' has no len()"], []]
expect(warnings).to(equal(expected_warnings))
|
Test S06 report parsing for both correct and error-containing inputs
|
Test S06 report parsing for both correct and error-containing inputs
|
Python
|
agpl-3.0
|
gisce/primestg
|
python
|
## Code Before:
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
result = self.report[key].values
expect(result).to(equal(expected_result))
# result_filename = '{}_result.txt'.format(self.data_filename)
#
# with open(result_filename) as result_file:
# result_string = result_file.read()
# self.expected_result = literal_eval(result_string)
#
# result = self.report.values
#
# expect(result).to(equal(self.expected_result))
## Instruction:
Test S06 report parsing for both correct and error-containing inputs
## Code After:
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):

    with before.all:
        # Parse every sample S06 file once; each report is later compared
        # against its matching <data_filename>_result.txt fixture.
        self.data_filenames = [
            'spec/data/S06.xml',
            'spec/data/S06_with_error.xml',
            # 'spec/data/S06_empty.xml'
        ]
        self.report = []
        for data_filename in self.data_filenames:
            with open(data_filename) as data_file:
                self.report.append(Report(data_file))

    with it('generates the expected results for the whole report'):
        result_filenames = []
        warnings = []
        for data_filename in self.data_filenames:
            result_filenames.append('{}_result.txt'.format(data_filename))
        for key, result_filename in enumerate(result_filenames):
            result = []
            with open(result_filename) as result_file:
                result_string = result_file.read()
                # Fixture files contain a Python literal of the expected values.
                expected_result = literal_eval(result_string)
            # Flatten every meter's values across all concentrators, and
            # collect the per-meter warning lists as we go.
            for cnc in self.report[key].concentrators:
                if cnc.meters:
                    for meter in cnc.meters:
                        for value in meter.values:
                            result.append(value)
                        warnings.append(meter.warnings)
            print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
                result, expected_result, warnings))
            expect(result).to(equal(expected_result))
        # The second (error-injected) file is expected to produce exactly one
        # warning; the other meters produce empty warning lists.
        expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
                                  "Meter(ZIV42553686). Thrown exception: "
                                  "object of type 'NoneType' has no len()"], []]
        expect(warnings).to(equal(expected_warnings))
|
# ... existing code ...
self.data_filenames = [
'spec/data/S06.xml',
'spec/data/S06_with_error.xml',
# 'spec/data/S06_empty.xml'
]
# ... modified code ...
with it('generates the expected results for the whole report'):
result_filenames = []
warnings = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
result = []
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
for cnc in self.report[key].concentrators:
if cnc.meters:
for meter in cnc.meters:
for value in meter.values:
result.append(value)
warnings.append(meter.warnings)
print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
result, expected_result, warnings))
expect(result).to(equal(expected_result))
expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
"Meter(ZIV42553686). Thrown exception: "
"object of type 'NoneType' has no len()"], []]
expect(warnings).to(equal(expected_warnings))
# ... rest of the code ...
|
c723875e377f92710bae4e55fbafb7ba8ea6220c
|
src/boundaryCondition.h
|
src/boundaryCondition.h
|
/***************************************************************************//**
* \file boundaryCondition.h
* \author Krishnan, A. ([email protected])
* \brief Definition of the class \c boundaryCondition
*/
#pragma once
#include <string>
#include <sstream>
#include "types.h"
#include "parameterDB.h"
/**
* \class boundaryCondition
* \brief Store the boundary conditions for a given system
*/
class boundaryCondition
{
public:
bcType type; ///< type of boundary condition
real value; ///< numerical value associated with the boundary condition
/**
* \brief Constructor of the class \c boundaryCondition.
*
* Initialize with a Dirichlet-type boundary condition
* with a value sets to zero.
*
*/
boundaryCondition() : type(DIRICHLET), value(0) {};
/**
* \brief Other constructor of the class \c boundaryCondition.
*
* Initialize with a given boundary condition type
* and a given value.
*
*/
boundaryCondition(bcType _type, real _value) : type(_type), value(_value) {};
/*const char *print()
{
std::stringstream ss;
ss << toString(this->type);
ss << " : ";
ss << this->value;
std::string st = ss.str();
//std::cout << st << std::endl;
return ss.str().c_str();
}*/
};
|
/***************************************************************************//**
* \file boundaryCondition.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c boundaryCondition.
*/
#pragma once
#include <string>
#include <sstream>
#include "types.h"
#include "parameterDB.h"
/**
* \class boundaryCondition
* \brief Stores the boundary conditions for a given system.
*/
class boundaryCondition
{
public:
bcType type; ///< type of boundary condition
real value; ///< numerical value associated with the boundary condition
/**
* \brief Constructor of the class \c boundaryCondition.
*
* Boundary condition initialized with a Dirichlet-type with
* with a value sets to zero.
*
*/
boundaryCondition() : type(DIRICHLET), value(0) {};
/**
* \brief Other constructor of the class \c boundaryCondition.
*
* Boundary condition initialized with a given type and a given value.
*
*/
boundaryCondition(bcType _type, real _value) : type(_type), value(_value) {};
/*const char *print()
{
std::stringstream ss;
ss << toString(this->type);
ss << " : ";
ss << this->value;
std::string st = ss.str();
//std::cout << st << std::endl;
return ss.str().c_str();
}*/
};
|
Update Doxygen documentation with conventions
|
Update Doxygen documentation with conventions
|
C
|
mit
|
barbagroup/cuIBM,barbagroup/cuIBM,barbagroup/cuIBM
|
c
|
## Code Before:
/***************************************************************************//**
* \file boundaryCondition.h
* \author Krishnan, A. ([email protected])
* \brief Definition of the class \c boundaryCondition
*/
#pragma once
#include <string>
#include <sstream>
#include "types.h"
#include "parameterDB.h"
/**
* \class boundaryCondition
* \brief Store the boundary conditions for a given system
*/
class boundaryCondition
{
public:
bcType type; ///< type of boundary condition
real value; ///< numerical value associated with the boundary condition
/**
* \brief Constructor of the class \c boundaryCondition.
*
* Initialize with a Dirichlet-type boundary condition
* with a value sets to zero.
*
*/
boundaryCondition() : type(DIRICHLET), value(0) {};
/**
* \brief Other constructor of the class \c boundaryCondition.
*
* Initialize with a given boundary condition type
* and a given value.
*
*/
boundaryCondition(bcType _type, real _value) : type(_type), value(_value) {};
/*const char *print()
{
std::stringstream ss;
ss << toString(this->type);
ss << " : ";
ss << this->value;
std::string st = ss.str();
//std::cout << st << std::endl;
return ss.str().c_str();
}*/
};
## Instruction:
Update Doxygen documentation with conventions
## Code After:
/***************************************************************************//**
* \file boundaryCondition.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c boundaryCondition.
*/
#pragma once
#include <string>
#include <sstream>
#include "types.h"
#include "parameterDB.h"
/**
* \class boundaryCondition
* \brief Stores the boundary conditions for a given system.
*/
class boundaryCondition
{
public:
	bcType type;  ///< type of boundary condition
	real value;   ///< numerical value associated with the boundary condition

	/**
	 * \brief Constructor of the class \c boundaryCondition.
	 *
	 * Initializes the boundary condition as Dirichlet-type
	 * with a value set to zero.
	 */
	boundaryCondition() : type(DIRICHLET), value(0) {};

	/**
	 * \brief Other constructor of the class \c boundaryCondition.
	 *
	 * Initializes the boundary condition with a given type and a given value.
	 */
	boundaryCondition(bcType _type, real _value) : type(_type), value(_value) {};

	/*const char *print()
	{
		std::stringstream ss;
		ss << toString(this->type);
		ss << " : ";
		ss << this->value;
		std::string st = ss.str();
		//std::cout << st << std::endl;
		return ss.str().c_str();
	}*/
};
|
...
/***************************************************************************//**
* \file boundaryCondition.h
* \author Anush Krishnan ([email protected])
* \brief Definition of the class \c boundaryCondition.
*/
#pragma once
...
#include "types.h"
#include "parameterDB.h"
/**
* \class boundaryCondition
* \brief Stores the boundary conditions for a given system.
*/
class boundaryCondition
{
public:
...
real value; ///< numerical value associated with the boundary condition
/**
* \brief Constructor of the class \c boundaryCondition.
*
* Boundary condition initialized with a Dirichlet-type with
* with a value sets to zero.
*
*/
boundaryCondition() : type(DIRICHLET), value(0) {};
/**
* \brief Other constructor of the class \c boundaryCondition.
*
* Boundary condition initialized with a given type and a given value.
*
*/
boundaryCondition(bcType _type, real _value) : type(_type), value(_value) {};
/*const char *print()
...
|
5efddf26176ac778556a3568bf97c2e70daac866
|
anchorhub/settings/default_settings.py
|
anchorhub/settings/default_settings.py
|
WRAPPER = "{ }"
INPUT = "."
OUTPUT = "out-anchorhub"
ARGPARSER = {
"description": "anchorhub parses through Markdown files and precompiles "
"links to specially formatted anchors."
}
ARGPARSE_INPUT = {
"help": "Path of directory tree to be parsed",
}
ARGPARSE_OUTPUT = {
"help": "Desired output location (default is \"" + OUTPUT + "\")",
"default": OUTPUT
}
ARGPARSE_OVERWRITE = {
"help": "Overwrite input files; ignore output location"
}
ARGPARSE_EXTENSION = {
"help": "Indicate which file extensions to search and run anchorhub on.",
"default": [".md"]
}
ARGPARSE_WRAPPER = {
"help": "Specify custom wrapper format (default is \"" + WRAPPER + "\")",
"default": WRAPPER
}
|
WRAPPER = '{ }'
INPUT = '.'
OUTPUT = 'out-anchorhub'
ARGPARSER = {
'description': "anchorhub parses through Markdown files and precompiles "
"links to specially formatted anchors."
}
ARGPARSE_INPUT = {
'help': "Path of directory tree to be parsed",
}
ARGPARSE_OUTPUT = {
'help': "Desired output location (default is \"" + OUTPUT + "\")",
'default': OUTPUT
}
ARGPARSE_OVERWRITE = {
'help': "Overwrite input files; ignore output location"
}
ARGPARSE_EXTENSION = {
'help': "Indicate which file extensions to search and run anchorhub on.",
'default': [".md"]
}
ARGPARSE_WRAPPER = {
'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")",
'default': WRAPPER
}
|
Replace many double quotes with single quotes
|
Replace many double quotes with single quotes
|
Python
|
apache-2.0
|
samjabrahams/anchorhub
|
python
|
## Code Before:
WRAPPER = "{ }"
INPUT = "."
OUTPUT = "out-anchorhub"
ARGPARSER = {
"description": "anchorhub parses through Markdown files and precompiles "
"links to specially formatted anchors."
}
ARGPARSE_INPUT = {
"help": "Path of directory tree to be parsed",
}
ARGPARSE_OUTPUT = {
"help": "Desired output location (default is \"" + OUTPUT + "\")",
"default": OUTPUT
}
ARGPARSE_OVERWRITE = {
"help": "Overwrite input files; ignore output location"
}
ARGPARSE_EXTENSION = {
"help": "Indicate which file extensions to search and run anchorhub on.",
"default": [".md"]
}
ARGPARSE_WRAPPER = {
"help": "Specify custom wrapper format (default is \"" + WRAPPER + "\")",
"default": WRAPPER
}
## Instruction:
Replace many double quotes with single quotes
## Code After:
WRAPPER = '{ }'
INPUT = '.'
OUTPUT = 'out-anchorhub'
ARGPARSER = {
'description': "anchorhub parses through Markdown files and precompiles "
"links to specially formatted anchors."
}
ARGPARSE_INPUT = {
'help': "Path of directory tree to be parsed",
}
ARGPARSE_OUTPUT = {
'help': "Desired output location (default is \"" + OUTPUT + "\")",
'default': OUTPUT
}
ARGPARSE_OVERWRITE = {
'help': "Overwrite input files; ignore output location"
}
ARGPARSE_EXTENSION = {
'help': "Indicate which file extensions to search and run anchorhub on.",
'default': [".md"]
}
ARGPARSE_WRAPPER = {
'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")",
'default': WRAPPER
}
|
// ... existing code ...
WRAPPER = '{ }'
INPUT = '.'
OUTPUT = 'out-anchorhub'
ARGPARSER = {
'description': "anchorhub parses through Markdown files and precompiles "
"links to specially formatted anchors."
}
ARGPARSE_INPUT = {
'help': "Path of directory tree to be parsed",
}
ARGPARSE_OUTPUT = {
'help': "Desired output location (default is \"" + OUTPUT + "\")",
'default': OUTPUT
}
ARGPARSE_OVERWRITE = {
'help': "Overwrite input files; ignore output location"
}
ARGPARSE_EXTENSION = {
'help': "Indicate which file extensions to search and run anchorhub on.",
'default': [".md"]
}
ARGPARSE_WRAPPER = {
'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")",
'default': WRAPPER
}
// ... rest of the code ...
|
cbdfc1b1cb4162256538576cabe2b6832aa83bca
|
django_mysqlpool/__init__.py
|
django_mysqlpool/__init__.py
|
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
Fix circular import when used with other add-ons that import django.db
|
Fix circular import when used with other add-ons that import django.db
eg sorl_thumbnail:
Traceback (most recent call last):
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/bin/manage", line 40, in <module>
sys.exit(manage.main())
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/retrans/manage.py", line 15, in main
execute_manager(settings)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 438, in execute_manager
utility.execute()
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 379, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 191, in run_from_argv
self.execute(*args, **options.__dict__)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 209, in execute
translation.activate('en-us')
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/__init__.py", line 100, in activate
return _trans.activate(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 202, in activate
_active.value = translation(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 185, in translation
default_translation = _fetch(settings.LANGUAGE_CODE)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 162, in _fetch
app = import_module(appname)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/importlib.py", line 35, in import_module
__import__(name)
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/__init__.py", line 1, in <module>
from sorl.thumbnail.fields import ImageField
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/fields.py", line 2, in <module>
from django.db import models
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/__init__.py", line 78, in <module>
connection = connections[DEFAULT_DB_ALIAS]
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 94, in __getitem__
backend = load_backend(db['ENGINE'])
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 47, in load_backend
if backend_name not in available_backends:
django.core.exceptions.ImproperlyConfigured: 'django_mysqlpool.backends.mysqlpool' isn't an available database backend.
Try using django.db.backends.XXX, where XXX is one of:
'dummy', 'mysql', 'oracle', 'postgresql', 'postgresql_psycopg2', 'sqlite3'
Error was: cannot import name connection
|
Python
|
mit
|
smartfile/django-mysqlpool
|
python
|
## Code Before:
from functools import wraps
from django.db import connection
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
## Instruction:
Fix circular import when used with other add-ons that import django.db
eg sorl_thumbnail:
Traceback (most recent call last):
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/bin/manage", line 40, in <module>
sys.exit(manage.main())
File "/home/rpatterson/src/work/retrans/src/ReTransDjango/retrans/manage.py", line 15, in main
execute_manager(settings)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 438, in execute_manager
utility.execute()
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 379, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 191, in run_from_argv
self.execute(*args, **options.__dict__)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 209, in execute
translation.activate('en-us')
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/__init__.py", line 100, in activate
return _trans.activate(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 202, in activate
_active.value = translation(language)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 185, in translation
default_translation = _fetch(settings.LANGUAGE_CODE)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 162, in _fetch
app = import_module(appname)
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/importlib.py", line 35, in import_module
__import__(name)
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/__init__.py", line 1, in <module>
from sorl.thumbnail.fields import ImageField
File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/fields.py", line 2, in <module>
from django.db import models
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/__init__.py", line 78, in <module>
connection = connections[DEFAULT_DB_ALIAS]
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 94, in __getitem__
backend = load_backend(db['ENGINE'])
File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 47, in load_backend
if backend_name not in available_backends:
django.core.exceptions.ImproperlyConfigured: 'django_mysqlpool.backends.mysqlpool' isn't an available database backend.
Try using django.db.backends.XXX, where XXX is one of:
'dummy', 'mysql', 'oracle', 'postgresql', 'postgresql_psycopg2', 'sqlite3'
Error was: cannot import name connection
## Code After:
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
finally:
connection.close()
return wrapper
|
...
from functools import wraps
def auto_close_db(f):
"Ensures the database connection is closed when the function returns."
from django.db import connection
@wraps(f)
def wrapper(*args, **kwargs):
try:
...
|
f3994034c767f5c181d09bdb08e395eb11dfe18e
|
tests/embedded/main.c
|
tests/embedded/main.c
|
/*
* Copyright © 2009 CNRS, INRIA, Université Bordeaux 1
* Copyright © 2009 Cisco Systems, Inc. All rights reserved.
* See COPYING in top-level directory.
*/
#include <hwloc.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
mytest_hwloc_topology_t topology;
unsigned depth;
hwloc_cpuset_t cpu_set;
/* Just call a bunch of functions to see if we can link and run */
cpu_set = mytest_hwloc_cpuset_alloc();
mytest_hwloc_topology_init(&topology);
mytest_hwloc_topology_load(topology);
depth = mytest_hwloc_topology_get_depth(topology);
printf("Max depth: %u\n", depth);
mytest_hwloc_topology_destroy(topology);
mytest_hwloc_cpuset_free(cpu_set);
return 0;
}
|
/*
* Copyright © 2009 CNRS, INRIA, Université Bordeaux 1
* Copyright © 2009 Cisco Systems, Inc. All rights reserved.
* See COPYING in top-level directory.
*/
#include <hwloc.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
mytest_hwloc_topology_t topology;
unsigned depth;
hwloc_cpuset_t cpu_set;
/* Just call a bunch of functions to see if we can link and run */
printf("*** Test 1: cpuset alloc\n");
cpu_set = mytest_hwloc_cpuset_alloc();
printf("*** Test 2: topology init\n");
mytest_hwloc_topology_init(&topology);
printf("*** Test 3: topology load\n");
mytest_hwloc_topology_load(topology);
printf("*** Test 4: topology get depth\n");
depth = mytest_hwloc_topology_get_depth(topology);
printf(" Max depth: %u\n", depth);
printf("*** Test 5: topology destroy\n");
mytest_hwloc_topology_destroy(topology);
printf("*** Test 6: cpuset free\n");
mytest_hwloc_cpuset_free(cpu_set);
return 0;
}
|
Add some more print statements to this test, just to help differentiate the output when debugging is enabled
|
Add some more print statements to this test, just to help
differentiate the output when debugging is enabled
git-svn-id: 14be032f8f42541b1a281b51ae8ea69814daf20e@1752 4b44e086-7f34-40ce-a3bd-00e031736276
|
C
|
bsd-3-clause
|
BlueBrain/hwloc,BlueBrain/hwloc,BlueBrain/hwloc,BlueBrain/hwloc
|
c
|
## Code Before:
/*
* Copyright © 2009 CNRS, INRIA, Université Bordeaux 1
* Copyright © 2009 Cisco Systems, Inc. All rights reserved.
* See COPYING in top-level directory.
*/
#include <hwloc.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
mytest_hwloc_topology_t topology;
unsigned depth;
hwloc_cpuset_t cpu_set;
/* Just call a bunch of functions to see if we can link and run */
cpu_set = mytest_hwloc_cpuset_alloc();
mytest_hwloc_topology_init(&topology);
mytest_hwloc_topology_load(topology);
depth = mytest_hwloc_topology_get_depth(topology);
printf("Max depth: %u\n", depth);
mytest_hwloc_topology_destroy(topology);
mytest_hwloc_cpuset_free(cpu_set);
return 0;
}
## Instruction:
Add some more print statements to this test, just to help
differentiate the output when debugging is enabled
git-svn-id: 14be032f8f42541b1a281b51ae8ea69814daf20e@1752 4b44e086-7f34-40ce-a3bd-00e031736276
## Code After:
/*
* Copyright © 2009 CNRS, INRIA, Université Bordeaux 1
* Copyright © 2009 Cisco Systems, Inc. All rights reserved.
* See COPYING in top-level directory.
*/
#include <hwloc.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
mytest_hwloc_topology_t topology;
unsigned depth;
hwloc_cpuset_t cpu_set;
/* Just call a bunch of functions to see if we can link and run */
printf("*** Test 1: cpuset alloc\n");
cpu_set = mytest_hwloc_cpuset_alloc();
printf("*** Test 2: topology init\n");
mytest_hwloc_topology_init(&topology);
printf("*** Test 3: topology load\n");
mytest_hwloc_topology_load(topology);
printf("*** Test 4: topology get depth\n");
depth = mytest_hwloc_topology_get_depth(topology);
printf(" Max depth: %u\n", depth);
printf("*** Test 5: topology destroy\n");
mytest_hwloc_topology_destroy(topology);
printf("*** Test 6: cpuset free\n");
mytest_hwloc_cpuset_free(cpu_set);
return 0;
}
|
...
/* Just call a bunch of functions to see if we can link and run */
printf("*** Test 1: cpuset alloc\n");
cpu_set = mytest_hwloc_cpuset_alloc();
printf("*** Test 2: topology init\n");
mytest_hwloc_topology_init(&topology);
printf("*** Test 3: topology load\n");
mytest_hwloc_topology_load(topology);
printf("*** Test 4: topology get depth\n");
depth = mytest_hwloc_topology_get_depth(topology);
printf(" Max depth: %u\n", depth);
printf("*** Test 5: topology destroy\n");
mytest_hwloc_topology_destroy(topology);
printf("*** Test 6: cpuset free\n");
mytest_hwloc_cpuset_free(cpu_set);
return 0;
...
|
c2859bd8da741862ee01a276a1350fb4a5931dbc
|
data_access.py
|
data_access.py
|
import sys
import mysql.connector
def insert():
cursor = connection.cursor()
try:
cursor.execute("drop table employees")
except:
pass
cursor.execute("create table employees (id integer primary key, name text)")
cursor.close()
print("Inserting employees...")
for n in xrange(0, 10000):
cursor = connection.cursor()
cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" %
(n, n))
connection.commit()
cursor.close()
def select():
print("Selecting employees...")
while True:
cursor = connection.cursor()
cursor.execute("select * from employees where name like '%1417773'")
for row in cursor:
pass
cursor.close()
connection = mysql.connector.connect(host='localhost', database='test')
if "insert" in sys.argv:
while True:
insert()
elif "insert_once" in sys.argv:
insert()
elif "select" in sys.argv:
select()
else:
print("USAGE: data_access.py <insert|insert_once|select>")
connection.close()
|
from random import randint
import sys
import mysql.connector
NUM_EMPLOYEES = 10000
def insert():
cursor = connection.cursor()
try:
cursor.execute("drop table employees")
except:
pass
cursor.execute("create table employees (id integer primary key, name text)")
cursor.close()
print("Inserting employees...")
for n in xrange(0, NUM_EMPLOYEES):
cursor = connection.cursor()
cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" %
(n, n))
connection.commit()
cursor.close()
def select():
print("Selecting employees...")
while True:
cursor = connection.cursor()
cursor.execute("select * from employees where name like '%%%d'" % randint(0, NUM_EMPLOYEES))
for row in cursor:
pass
cursor.close()
connection = mysql.connector.connect(host='localhost', database='test')
if "insert" in sys.argv:
while True:
insert()
elif "insert_once" in sys.argv:
insert()
elif "select" in sys.argv:
select()
else:
print("USAGE: data_access.py <insert|insert_once|select>")
connection.close()
|
Change data access script to issue SELECTs that actually return a value
|
Change data access script to issue SELECTs that actually return a value
This makes the part about tracing the SQL statements and tracing the
number of rows returned a little more interesting.
|
Python
|
mit
|
goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop
|
python
|
## Code Before:
import sys
import mysql.connector
def insert():
cursor = connection.cursor()
try:
cursor.execute("drop table employees")
except:
pass
cursor.execute("create table employees (id integer primary key, name text)")
cursor.close()
print("Inserting employees...")
for n in xrange(0, 10000):
cursor = connection.cursor()
cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" %
(n, n))
connection.commit()
cursor.close()
def select():
print("Selecting employees...")
while True:
cursor = connection.cursor()
cursor.execute("select * from employees where name like '%1417773'")
for row in cursor:
pass
cursor.close()
connection = mysql.connector.connect(host='localhost', database='test')
if "insert" in sys.argv:
while True:
insert()
elif "insert_once" in sys.argv:
insert()
elif "select" in sys.argv:
select()
else:
print("USAGE: data_access.py <insert|insert_once|select>")
connection.close()
## Instruction:
Change data access script to issue SELECTs that actually return a value
This makes the part about tracing the SQL statements and tracing the
number of rows returned a little more interesting.
## Code After:
from random import randint
import sys
import mysql.connector
NUM_EMPLOYEES = 10000
def insert():
cursor = connection.cursor()
try:
cursor.execute("drop table employees")
except:
pass
cursor.execute("create table employees (id integer primary key, name text)")
cursor.close()
print("Inserting employees...")
for n in xrange(0, NUM_EMPLOYEES):
cursor = connection.cursor()
cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" %
(n, n))
connection.commit()
cursor.close()
def select():
print("Selecting employees...")
while True:
cursor = connection.cursor()
cursor.execute("select * from employees where name like '%%%d'" % randint(0, NUM_EMPLOYEES))
for row in cursor:
pass
cursor.close()
connection = mysql.connector.connect(host='localhost', database='test')
if "insert" in sys.argv:
while True:
insert()
elif "insert_once" in sys.argv:
insert()
elif "select" in sys.argv:
select()
else:
print("USAGE: data_access.py <insert|insert_once|select>")
connection.close()
|
# ... existing code ...
from random import randint
import sys
import mysql.connector
NUM_EMPLOYEES = 10000
def insert():
cursor = connection.cursor()
# ... modified code ...
cursor.close()
print("Inserting employees...")
for n in xrange(0, NUM_EMPLOYEES):
cursor = connection.cursor()
cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" %
(n, n))
...
print("Selecting employees...")
while True:
cursor = connection.cursor()
cursor.execute("select * from employees where name like '%%%d'" % randint(0, NUM_EMPLOYEES))
for row in cursor:
pass
cursor.close()
# ... rest of the code ...
|
fed8d1d85b929dc21cd49cd2cd0ac660f19e7a36
|
comics/crawlers/bizarro.py
|
comics/crawlers/bizarro.py
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bizarro'
language = 'no'
url = 'http://www.start.no/tegneserier/bizarro/'
start_date = '1985-01-01'
end_date = '2009-06-24' # No longer hosted at start.no
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Dan Piraro'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://g2.start.no/tegneserier/striper/bizarro/biz-striper/biz%(date)s.gif' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bizarro'
language = 'no'
url = 'http://underholdning.no.msn.com/tegneserier/bizarro/'
start_date = '1985-01-01'
history_capable_days = 12
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Dan Piraro'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://underholdning.no.msn.com/rss/bizarro.aspx')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.url = entry.enclosures[0].href
return
|
Update 'Bizarro' crawler to use msn.no instead of start.no
|
Update 'Bizarro' crawler to use msn.no instead of start.no
|
Python
|
agpl-3.0
|
klette/comics,datagutten/comics,datagutten/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,jodal/comics
|
python
|
## Code Before:
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bizarro'
language = 'no'
url = 'http://www.start.no/tegneserier/bizarro/'
start_date = '1985-01-01'
end_date = '2009-06-24' # No longer hosted at start.no
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Dan Piraro'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://g2.start.no/tegneserier/striper/bizarro/biz-striper/biz%(date)s.gif' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
## Instruction:
Update 'Bizarro' crawler to use msn.no instead of start.no
## Code After:
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bizarro'
language = 'no'
url = 'http://underholdning.no.msn.com/tegneserier/bizarro/'
start_date = '1985-01-01'
history_capable_days = 12
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Dan Piraro'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://underholdning.no.msn.com/rss/bizarro.aspx')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.url = entry.enclosures[0].href
return
|
...
class ComicMeta(BaseComicMeta):
name = 'Bizarro'
language = 'no'
url = 'http://underholdning.no.msn.com/tegneserier/bizarro/'
start_date = '1985-01-01'
history_capable_days = 12
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Dan Piraro'
...
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://underholdning.no.msn.com/rss/bizarro.aspx')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.url = entry.enclosures[0].href
return
...
|
40e9375f6b35b4a05ad311822705b7a7efe46b56
|
site_scons/get_libs.py
|
site_scons/get_libs.py
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
Add Linux 32-bit search path for Boost libraries
|
Add Linux 32-bit search path for Boost libraries
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware,wheeler-microfluidics/dmf-control-board-firmware
|
python
|
## Code Before:
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/x86_64-linux-gnu',
'/usr/local/lib'] + list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
## Instruction:
Add Linux 32-bit search path for Boost libraries
## Code After:
import os
import sys
from SCons.Script import File
from path_helpers import path
def get_lib_paths():
if sys.platform == 'win32':
lib_paths = set(os.environ['PATH'].split(';'))
else:
lib_paths = set()
if os.environ.has_key('LIBRARY_PATH'):
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
def get_lib(lib_name, LIBPATH=None):
if not LIBPATH:
LIBPATH = []
else:
LIBPATH = LIBPATH[:]
LIBPATH += get_lib_paths()
for lp in [path(p) for p in LIBPATH]:
try:
files = lp.files(lib_name)
except OSError:
continue
if files:
return File(sorted(files, key=len)[0])
return None
|
...
lib_paths.update(os.environ['LIBRARY_PATH'].split(':'))
if os.environ.has_key('LD_LIBRARY_PATH'):
lib_paths.update(os.environ['LD_LIBRARY_PATH'].split(':'))
lib_paths = (['/usr/lib', '/usr/lib/i386-linux-gnu',
'/usr/lib/x86_64-linux-gnu', '/usr/local/lib'] +
list(lib_paths))
return lib_paths
...
|
b0ce1a387ce1c86b8b0008518a99d786006d1f32
|
include/llvm/Config/dlfcn.h
|
include/llvm/Config/dlfcn.h
|
/*
* The LLVM Compiler Infrastructure
*
* This file was developed by the LLVM research group and is distributed under
* the University of Illinois Open Source License. See LICENSE.TXT for details.
*
******************************************************************************
*
* Description:
* This header file is the autoconf replacement for dlfcn.h (if it lives
* on the system).
*/
#ifndef _CONFIG_DLFCN_H
#define _CONFIG_DLFCN_H
#include "llvm/Config/config.h"
#ifdef HAVE_DLFCN_H
#include <dlfcn.h>
#endif
#endif
|
/*
* The LLVM Compiler Infrastructure
*
* This file was developed by the LLVM research group and is distributed under
* the University of Illinois Open Source License. See LICENSE.TXT for details.
*
******************************************************************************
*
* Description:
* This header file is the autoconf replacement for dlfcn.h (if it lives
* on the system).
*/
#ifndef _CONFIG_DLFCN_H
#define _CONFIG_DLFCN_H
#include "llvm/Config/config.h"
#ifdef HAVE_LTDL_H
#include <ltdl.h>
#endif
#ifdef HAVE_DLFCN_H
#include <dlfcn.h>
#endif
#endif
|
Include ltdl.h if we have it.
|
Include ltdl.h if we have it.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@17952 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-llvm,llvm-mirror/llvm,chubbymaggie/asap,apple/swift-llvm,GPUOpen-Drivers/llvm,chubbymaggie/asap,llvm-mirror/llvm,apple/swift-llvm,dslab-epfl/asap,dslab-epfl/asap,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,chubbymaggie/asap,llvm-mirror/llvm,chubbymaggie/asap,chubbymaggie/asap,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,chubbymaggie/asap,llvm-mirror/llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,llvm-mirror/llvm,dslab-epfl/asap,dslab-epfl/asap,GPUOpen-Drivers/llvm,dslab-epfl/asap
|
c
|
## Code Before:
/*
* The LLVM Compiler Infrastructure
*
* This file was developed by the LLVM research group and is distributed under
* the University of Illinois Open Source License. See LICENSE.TXT for details.
*
******************************************************************************
*
* Description:
* This header file is the autoconf replacement for dlfcn.h (if it lives
* on the system).
*/
#ifndef _CONFIG_DLFCN_H
#define _CONFIG_DLFCN_H
#include "llvm/Config/config.h"
#ifdef HAVE_DLFCN_H
#include <dlfcn.h>
#endif
#endif
## Instruction:
Include ltdl.h if we have it.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@17952 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
/*
* The LLVM Compiler Infrastructure
*
* This file was developed by the LLVM research group and is distributed under
* the University of Illinois Open Source License. See LICENSE.TXT for details.
*
******************************************************************************
*
* Description:
* This header file is the autoconf replacement for dlfcn.h (if it lives
* on the system).
*/
#ifndef _CONFIG_DLFCN_H
#define _CONFIG_DLFCN_H
#include "llvm/Config/config.h"
#ifdef HAVE_LTDL_H
#include <ltdl.h>
#endif
#ifdef HAVE_DLFCN_H
#include <dlfcn.h>
#endif
#endif
|
# ... existing code ...
#include "llvm/Config/config.h"
#ifdef HAVE_LTDL_H
#include <ltdl.h>
#endif
#ifdef HAVE_DLFCN_H
#include <dlfcn.h>
#endif
# ... rest of the code ...
|
fb824bfaae4bd7ceaf5ee81dae2d28a9fe321cff
|
src/test/java/info/u_team/u_team_test/init/TestScreens.java
|
src/test/java/info/u_team/u_team_test/init/TestScreens.java
|
package info.u_team.u_team_test.init;
import info.u_team.u_team_core.util.registry.RegistryWorker;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.screen.*;
import net.minecraft.client.gui.ScreenManager;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD, value = Dist.CLIENT)
public class TestScreens {
@SubscribeEvent
public static void register(FMLClientSetupEvent event) {
RegistryWorker.runOnMainThread(() -> {
ScreenManager.registerFactory(TestContainers.BASIC, BasicTileEntityScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_ENERGY_CREATOR, BasicEnergyCreatorScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_FLUID_INVENTORY, BasicFluidInventoryScreen::new);
});
}
}
|
package info.u_team.u_team_test.init;
import info.u_team.u_team_core.util.registry.SyncedWorker;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.screen.*;
import net.minecraft.client.gui.ScreenManager;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD, value = Dist.CLIENT)
public class TestScreens {
@SubscribeEvent
public static void register(FMLClientSetupEvent event) {
SyncedWorker.runOnMainThread(() -> {
ScreenManager.registerFactory(TestContainers.BASIC, BasicTileEntityScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_ENERGY_CREATOR, BasicEnergyCreatorScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_FLUID_INVENTORY, BasicFluidInventoryScreen::new);
});
}
}
|
Fix class rename in caller class
|
Fix class rename in caller class
|
Java
|
apache-2.0
|
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
|
java
|
## Code Before:
package info.u_team.u_team_test.init;
import info.u_team.u_team_core.util.registry.RegistryWorker;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.screen.*;
import net.minecraft.client.gui.ScreenManager;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD, value = Dist.CLIENT)
public class TestScreens {
@SubscribeEvent
public static void register(FMLClientSetupEvent event) {
RegistryWorker.runOnMainThread(() -> {
ScreenManager.registerFactory(TestContainers.BASIC, BasicTileEntityScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_ENERGY_CREATOR, BasicEnergyCreatorScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_FLUID_INVENTORY, BasicFluidInventoryScreen::new);
});
}
}
## Instruction:
Fix class rename in caller class
## Code After:
package info.u_team.u_team_test.init;
import info.u_team.u_team_core.util.registry.SyncedWorker;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.screen.*;
import net.minecraft.client.gui.ScreenManager;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD, value = Dist.CLIENT)
public class TestScreens {
@SubscribeEvent
public static void register(FMLClientSetupEvent event) {
SyncedWorker.runOnMainThread(() -> {
ScreenManager.registerFactory(TestContainers.BASIC, BasicTileEntityScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_ENERGY_CREATOR, BasicEnergyCreatorScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_FLUID_INVENTORY, BasicFluidInventoryScreen::new);
});
}
}
|
...
package info.u_team.u_team_test.init;
import info.u_team.u_team_core.util.registry.SyncedWorker;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.screen.*;
import net.minecraft.client.gui.ScreenManager;
...
@SubscribeEvent
public static void register(FMLClientSetupEvent event) {
SyncedWorker.runOnMainThread(() -> {
ScreenManager.registerFactory(TestContainers.BASIC, BasicTileEntityScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_ENERGY_CREATOR, BasicEnergyCreatorScreen::new);
ScreenManager.registerFactory(TestContainers.BASIC_FLUID_INVENTORY, BasicFluidInventoryScreen::new);
...
|
7a00ff49799afc50da74a748d07c52fef57ebc84
|
setup.py
|
setup.py
|
import tungsten
from distutils.core import setup
setup(
name='Tungsten',
version=tungsten.__version__,
author='Seena Burns',
packages={'tungsten': 'tungsten'},
license=open('LICENSE.txt').read(),
description='Wolfram Alpha API built for Python.',
long_description=open('README.md').read(),
install_requires=[
"requests",
],
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
),
)
|
import tungsten
from distutils.core import setup
setup(
name='Tungsten',
version=tungsten.__version__,
author='Seena Burns',
author_email='[email protected]',
url='https://github.com/seenaburns/Tungsten',
packages={'tungsten': 'tungsten'},
license=open('LICENSE.txt').read(),
description='Wolfram Alpha API built for Python.',
long_description=open('README.md').read(),
install_requires=[
"requests",
],
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
),
)
|
Add url / author email for PyPI regs
|
Add url / author email for PyPI regs
|
Python
|
bsd-3-clause
|
seenaburns/Tungsten
|
python
|
## Code Before:
import tungsten
from distutils.core import setup
setup(
name='Tungsten',
version=tungsten.__version__,
author='Seena Burns',
packages={'tungsten': 'tungsten'},
license=open('LICENSE.txt').read(),
description='Wolfram Alpha API built for Python.',
long_description=open('README.md').read(),
install_requires=[
"requests",
],
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
),
)
## Instruction:
Add url / author email for PyPI regs
## Code After:
import tungsten
from distutils.core import setup
setup(
name='Tungsten',
version=tungsten.__version__,
author='Seena Burns',
author_email='[email protected]',
url='https://github.com/seenaburns/Tungsten',
packages={'tungsten': 'tungsten'},
license=open('LICENSE.txt').read(),
description='Wolfram Alpha API built for Python.',
long_description=open('README.md').read(),
install_requires=[
"requests",
],
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
),
)
|
...
name='Tungsten',
version=tungsten.__version__,
author='Seena Burns',
author_email='[email protected]',
url='https://github.com/seenaburns/Tungsten',
packages={'tungsten': 'tungsten'},
license=open('LICENSE.txt').read(),
description='Wolfram Alpha API built for Python.',
...
|
70323d2cc7c568fecda66adb0e8ace1922b15b8f
|
recipes/graphviz/run_test.py
|
recipes/graphviz/run_test.py
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
|
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
Add tests for svg and pdf on Windows
|
Add tests for svg and pdf on Windows
|
Python
|
bsd-3-clause
|
cpaulik/staged-recipes,jerowe/staged-recipes,cpaulik/staged-recipes,asmeurer/staged-recipes,hajapy/staged-recipes,guillochon/staged-recipes,richardotis/staged-recipes,glemaitre/staged-recipes,kwilcox/staged-recipes,patricksnape/staged-recipes,pstjohn/staged-recipes,johannesring/staged-recipes,caspervdw/staged-recipes,petrushy/staged-recipes,sannykr/staged-recipes,isuruf/staged-recipes,birdsarah/staged-recipes,vamega/staged-recipes,sodre/staged-recipes,Cashalow/staged-recipes,khallock/staged-recipes,rmcgibbo/staged-recipes,Juanlu001/staged-recipes,tylere/staged-recipes,scopatz/staged-recipes,vamega/staged-recipes,Savvysherpa/staged-recipes,tylere/staged-recipes,dharhas/staged-recipes,dfroger/staged-recipes,nicoddemus/staged-recipes,JohnGreeley/staged-recipes,shadowwalkersb/staged-recipes,jerowe/staged-recipes,OpenPIV/staged-recipes,jjhelmus/staged-recipes,NOAA-ORR-ERD/staged-recipes,jakirkham/staged-recipes,JohnGreeley/staged-recipes,richardotis/staged-recipes,jakirkham/staged-recipes,data-exp-lab/staged-recipes,ceholden/staged-recipes,sodre/staged-recipes,NOAA-ORR-ERD/staged-recipes,igortg/staged-recipes,hadim/staged-recipes,grlee77/staged-recipes,nicoddemus/staged-recipes,dschreij/staged-recipes,chohner/staged-recipes,rvalieris/staged-recipes,jcb91/staged-recipes,valgur/staged-recipes,larray-project/staged-recipes,birdsarah/staged-recipes,rvalieris/staged-recipes,benvandyke/staged-recipes,goanpeca/staged-recipes,isuruf/staged-recipes,mcernak/staged-recipes,jochym/staged-recipes,sannykr/staged-recipes,data-exp-lab/staged-recipes,petrushy/staged-recipes,sodre/staged-recipes,Juanlu001/staged-recipes,stuertz/staged-recipes,planetarypy/staged-recipes,chrisburr/staged-recipes,mariusvniekerk/staged-recipes,jochym/staged-recipes,chrisburr/staged-recipes,stuertz/staged-recipes,gqmelo/staged-recipes,ocefpaf/staged-recipes,mcernak/staged-recipes,atedstone/staged-recipes,johannesring/staged-recipes,basnijholt/staged-recipes,mariusvniekerk/staged-recipes,patricksnape/staged-rec
ipes,ReimarBauer/staged-recipes,bmabey/staged-recipes,dschreij/staged-recipes,mcs07/staged-recipes,kwilcox/staged-recipes,basnijholt/staged-recipes,koverholt/staged-recipes,atedstone/staged-recipes,SylvainCorlay/staged-recipes,dharhas/staged-recipes,pmlandwehr/staged-recipes,shadowwalkersb/staged-recipes,valgur/staged-recipes,khallock/staged-recipes,barkls/staged-recipes,glemaitre/staged-recipes,dfroger/staged-recipes,hbredin/staged-recipes,benvandyke/staged-recipes,gqmelo/staged-recipes,synapticarbors/staged-recipes,ReimarBauer/staged-recipes,koverholt/staged-recipes,conda-forge/staged-recipes,Cashalow/staged-recipes,caspervdw/staged-recipes,johanneskoester/staged-recipes,ceholden/staged-recipes,rolando-contrib/staged-recipes,hbredin/staged-recipes,rmcgibbo/staged-recipes,conda-forge/staged-recipes,bmabey/staged-recipes,scopatz/staged-recipes,SylvainCorlay/staged-recipes,blowekamp/staged-recipes,ocefpaf/staged-recipes,guillochon/staged-recipes,chohner/staged-recipes,planetarypy/staged-recipes,blowekamp/staged-recipes,asmeurer/staged-recipes,pmlandwehr/staged-recipes,jcb91/staged-recipes,hadim/staged-recipes,barkls/staged-recipes,pstjohn/staged-recipes,igortg/staged-recipes,mcs07/staged-recipes,Savvysherpa/staged-recipes,larray-project/staged-recipes,rolando-contrib/staged-recipes,hajapy/staged-recipes,jjhelmus/staged-recipes,OpenPIV/staged-recipes,johanneskoester/staged-recipes,grlee77/staged-recipes,synapticarbors/staged-recipes,goanpeca/staged-recipes
|
python
|
## Code Before:
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
## Instruction:
Add tests for svg and pdf on Windows
## Code After:
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
# ... existing code ...
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
# ... rest of the code ...
|
3f569ae0b64ffac7dc088c5b8bcbd89dce61b069
|
plugins/plugin-machine/che-plugin-machine-ext-client/src/main/java/org/eclipse/che/ide/extension/machine/client/command/valueproviders/CurrentProjectRelativePathProvider.java
|
plugins/plugin-machine/che-plugin-machine-ext-client/src/main/java/org/eclipse/che/ide/extension/machine/client/command/valueproviders/CurrentProjectRelativePathProvider.java
|
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.machine.client.command.valueproviders;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.ide.api.app.AppContext;
/**
* Provides relative path to specific project. Path to project resolves from current workspace root.
* e.g. /project_name.
*
* Need for IDEX-3924 as intermediate solution.
*
* @author Vlad Zhukovskiy
*/
@Singleton
public class CurrentProjectRelativePathProvider implements CommandPropertyValueProvider {
private static final String KEY = "${current.project.relpath}";
private AppContext appContext;
@Inject
public CurrentProjectRelativePathProvider(AppContext appContext) {
this.appContext = appContext;
}
@Override
public String getKey() {
return KEY;
}
@Override
public String getValue() {
return appContext.getCurrentProject().getProjectConfig().getPath().substring(1);
}
}
|
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.machine.client.command.valueproviders;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.ide.api.app.AppContext;
import org.eclipse.che.ide.api.app.CurrentProject;
/**
* Provides relative path to specific project. Path to project resolves from current workspace root.
* e.g. /project_name.
*
* Need for IDEX-3924 as intermediate solution.
*
* @author Vlad Zhukovskiy
*/
@Singleton
public class CurrentProjectRelativePathProvider implements CommandPropertyValueProvider {
private static final String KEY = "${current.project.relpath}";
private AppContext appContext;
@Inject
public CurrentProjectRelativePathProvider(AppContext appContext) {
this.appContext = appContext;
}
@Override
public String getKey() {
return KEY;
}
@Override
public String getValue() {
CurrentProject currentProject = appContext.getCurrentProject();
if (currentProject == null) {
return "";
}
return currentProject.getProjectConfig().getPath().substring(1);
}
}
|
Add checking on current project == null
|
Add checking on current project == null
Signed-off-by: Vitaly Parfonov <[email protected]>
|
Java
|
epl-1.0
|
akervern/che,bartlomiej-laczkowski/che,kaloyan-raev/che,TypeFox/che,sleshchenko/che,Mirage20/che,cemalkilic/che,cemalkilic/che,stour/che,evidolob/che,cdietrich/che,ollie314/che,cdietrich/che,TypeFox/che,cemalkilic/che,cemalkilic/che,stour/che,TypeFox/che,akervern/che,gazarenkov/che-sketch,sudaraka94/che,codenvy/che,sunix/che,slemeur/che,Patricol/che,sudaraka94/che,stour/che,akervern/che,TypeFox/che,lehmanju/che,cemalkilic/che,alexVengrovsk/che,gazarenkov/che-sketch,TypeFox/che,bartlomiej-laczkowski/che,ollie314/che,kaloyan-raev/che,TypeFox/che,davidfestal/che,sleshchenko/che,sunix/che,lehmanju/che,TypeFox/che,gazarenkov/che-sketch,sunix/che,davidfestal/che,cdietrich/che,cdietrich/che,bartlomiej-laczkowski/che,sunix/che,Patricol/che,bartlomiej-laczkowski/che,snjeza/che,gazarenkov/che-sketch,davidfestal/che,davidfestal/che,snjeza/che,Mirage20/che,Patricol/che,jonahkichwacoders/che,cemalkilic/che,Patricol/che,lehmanju/che,akervern/che,snjeza/che,sunix/che,davidfestal/che,davidfestal/che,Patricol/che,bartlomiej-laczkowski/che,sleshchenko/che,jonahkichwacoders/che,Mirage20/che,TypeFox/che,sudaraka94/che,bartlomiej-laczkowski/che,stour/che,cdietrich/che,sudaraka94/che,slemeur/che,codenvy/che,gazarenkov/che-sketch,kaloyan-raev/che,cdietrich/che,cemalkilic/che,jonahkichwacoders/che,slemeur/che,slemeur/che,slemeur/che,gazarenkov/che-sketch,snjeza/che,kaloyan-raev/che,Mirage20/che,snjeza/che,jonahkichwacoders/che,davidfestal/che,Patricol/che,davidfestal/che,sudaraka94/che,akervern/che,evidolob/che,ollie314/che,lehmanju/che,sleshchenko/che,lehmanju/che,jonahkichwacoders/che,Mirage20/che,lehmanju/che,Mirage20/che,ollie314/che,gazarenkov/che-sketch,bartlomiej-laczkowski/che,jonahkichwacoders/che,codenvy/che,ollie314/che,sleshchenko/che,sleshchenko/che,sleshchenko/che,evidolob/che,sleshchenko/che,akervern/che,snjeza/che,cdietrich/che,kaloyan-raev/che,sunix/che,ollie314/che,bartlomiej-laczkowski/che,snjeza/che,sudaraka94/che,sudaraka94/che,cemalkilic/che,sudaraka94/che,sunix/che,a
kervern/che,lehmanju/che,sunix/che,evidolob/che,sudaraka94/che,akervern/che,cdietrich/che,cemalkilic/che,jonahkichwacoders/che,alexVengrovsk/che,sleshchenko/che,sudaraka94/che,Patricol/che,TypeFox/che,Patricol/che,codenvy/che,lehmanju/che,davidfestal/che,sunix/che,bartlomiej-laczkowski/che,akervern/che,jonahkichwacoders/che,Patricol/che,sleshchenko/che,jonahkichwacoders/che,davidfestal/che,Patricol/che,cdietrich/che,jonahkichwacoders/che,gazarenkov/che-sketch,kaloyan-raev/che,TypeFox/che,alexVengrovsk/che,evidolob/che,gazarenkov/che-sketch,snjeza/che,alexVengrovsk/che,snjeza/che,lehmanju/che
|
java
|
## Code Before:
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.machine.client.command.valueproviders;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.ide.api.app.AppContext;
/**
* Provides relative path to specific project. Path to project resolves from current workspace root.
* e.g. /project_name.
*
* Need for IDEX-3924 as intermediate solution.
*
* @author Vlad Zhukovskiy
*/
@Singleton
public class CurrentProjectRelativePathProvider implements CommandPropertyValueProvider {
private static final String KEY = "${current.project.relpath}";
private AppContext appContext;
@Inject
public CurrentProjectRelativePathProvider(AppContext appContext) {
this.appContext = appContext;
}
@Override
public String getKey() {
return KEY;
}
@Override
public String getValue() {
return appContext.getCurrentProject().getProjectConfig().getPath().substring(1);
}
}
## Instruction:
Add checking on current project == null
Signed-off-by: Vitaly Parfonov <[email protected]>
## Code After:
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.machine.client.command.valueproviders;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.ide.api.app.AppContext;
import org.eclipse.che.ide.api.app.CurrentProject;
/**
* Provides relative path to specific project. Path to project resolves from current workspace root.
* e.g. /project_name.
*
* Need for IDEX-3924 as intermediate solution.
*
* @author Vlad Zhukovskiy
*/
@Singleton
public class CurrentProjectRelativePathProvider implements CommandPropertyValueProvider {
private static final String KEY = "${current.project.relpath}";
private AppContext appContext;
@Inject
public CurrentProjectRelativePathProvider(AppContext appContext) {
this.appContext = appContext;
}
@Override
public String getKey() {
return KEY;
}
@Override
public String getValue() {
CurrentProject currentProject = appContext.getCurrentProject();
if (currentProject == null) {
return "";
}
return currentProject.getProjectConfig().getPath().substring(1);
}
}
|
...
import com.google.inject.Singleton;
import org.eclipse.che.ide.api.app.AppContext;
import org.eclipse.che.ide.api.app.CurrentProject;
/**
* Provides relative path to specific project. Path to project resolves from current workspace root.
...
@Override
public String getValue() {
CurrentProject currentProject = appContext.getCurrentProject();
if (currentProject == null) {
return "";
}
return currentProject.getProjectConfig().getPath().substring(1);
}
}
...
|
08eb1f9e510b85e77d401ca4e13b7ad5354f4ecf
|
ingestors/email/outlookpst.py
|
ingestors/email/outlookpst.py
|
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
|
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-S', # single files
'-D', # include deleted
# '-r', # recursive structure
'-8', # utf-8 where possible
'-cv', # export vcards
# '-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
|
Make outlook emit single files
|
Make outlook emit single files
|
Python
|
mit
|
alephdata/ingestors
|
python
|
## Code Before:
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
## Instruction:
Make outlook emit single files
## Code After:
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-S', # single files
'-D', # include deleted
# '-r', # recursive structure
'-8', # utf-8 where possible
'-cv', # export vcards
# '-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
|
# ... existing code ...
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-S', # single files
'-D', # include deleted
# '-r', # recursive structure
'-8', # utf-8 where possible
'-cv', # export vcards
# '-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
# ... rest of the code ...
|
04557bbff362ae3b89e7dd98a1fb11e0aaeba50e
|
common/djangoapps/student/migrations/0010_auto_20170207_0458.py
|
common/djangoapps/student/migrations/0010_auto_20170207_0458.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
migrations.RunSQL(
# Do nothing:
"select 1",
"select 1"
)
]
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
# Nothing to do.
]
|
Make this no-op migration be a true no-op.
|
Make this no-op migration be a true no-op.
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,lduarte1991/edx-platform,eduNEXT/edx-platform,msegado/edx-platform,cpennington/edx-platform,hastexo/edx-platform,a-parhom/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,gymnasium/edx-platform,eduNEXT/edunext-platform,pepeportela/edx-platform,ESOedX/edx-platform,pepeportela/edx-platform,miptliot/edx-platform,BehavioralInsightsTeam/edx-platform,appsembler/edx-platform,msegado/edx-platform,appsembler/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,gymnasium/edx-platform,Lektorium-LLC/edx-platform,miptliot/edx-platform,edx/edx-platform,procangroup/edx-platform,mitocw/edx-platform,romain-li/edx-platform,miptliot/edx-platform,CredoReference/edx-platform,Lektorium-LLC/edx-platform,cpennington/edx-platform,proversity-org/edx-platform,fintech-circle/edx-platform,jolyonb/edx-platform,ahmedaljazzar/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,edx-solutions/edx-platform,teltek/edx-platform,cpennington/edx-platform,pepeportela/edx-platform,BehavioralInsightsTeam/edx-platform,arbrandes/edx-platform,raccoongang/edx-platform,stvstnfrd/edx-platform,ahmedaljazzar/edx-platform,hastexo/edx-platform,arbrandes/edx-platform,ESOedX/edx-platform,CredoReference/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,stvstnfrd/edx-platform,fintech-circle/edx-platform,mitocw/edx-platform,pabloborrego93/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,angelapper/edx-platform,jolyonb/edx-platform,gymnasium/edx-platform,eduNEXT/edx-platform,gsehub/edx-platform,msegado/edx-platform,teltek/edx-platform,TeachAtTUM/edx-platform,appsembler/edx-platform,pepeportela/edx-platform,Edraak/edraak-platform,pabloborrego93/edx-platform,a-parhom/edx-platform,fintech-circle/edx-platform,pabloborrego93/edx-platform,appsembler/edx-platform,lduarte1991/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,edx-solutions/edx-platform,teltek/edx-platform
,proversity-org/edx-platform,proversity-org/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,Lektorium-LLC/edx-platform,edx/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,lduarte1991/edx-platform,miptliot/edx-platform,procangroup/edx-platform,CredoReference/edx-platform,teltek/edx-platform,procangroup/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,jolyonb/edx-platform,raccoongang/edx-platform,gymnasium/edx-platform,fintech-circle/edx-platform,raccoongang/edx-platform,pabloborrego93/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,kmoocdev2/edx-platform,proversity-org/edx-platform,CredoReference/edx-platform,ahmedaljazzar/edx-platform,lduarte1991/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,ahmedaljazzar/edx-platform,kmoocdev2/edx-platform,romain-li/edx-platform,Edraak/edraak-platform,EDUlib/edx-platform,romain-li/edx-platform,cpennington/edx-platform,eduNEXT/edunext-platform,ESOedX/edx-platform,edx/edx-platform,Stanford-Online/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,hastexo/edx-platform,a-parhom/edx-platform,procangroup/edx-platform,BehavioralInsightsTeam/edx-platform,mitocw/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,raccoongang/edx-platform,a-parhom/edx-platform
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
migrations.RunSQL(
# Do nothing:
"select 1",
"select 1"
)
]
## Instruction:
Make this no-op migration be a true no-op.
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0009_auto_20170111_0422'),
]
# This migration was to add a constraint that we lost in the Django
# 1.4->1.8 upgrade. But since the constraint used to be created, production
# would already have the constraint even before running the migration, and
# running the migration would fail. We needed to make the migration
# idempotent. Instead of reverting this migration while we did that, we
# edited it to be a SQL no-op, so that people who had already applied it
# wouldn't end up with a ghost migration.
# It had been:
#
# migrations.RunSQL(
# "create unique index email on auth_user (email);",
# "drop index email on auth_user;"
# )
operations = [
# Nothing to do.
]
|
// ... existing code ...
# )
operations = [
# Nothing to do.
]
// ... rest of the code ...
|
272ece1774cebaf8d6d6ae9e0dfb5fe0cce97083
|
manage.py
|
manage.py
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Add missing env variables for testing.
|
Add missing env variables for testing.
|
Python
|
bsd-2-clause
|
mblayman/lcp,mblayman/lcp,mblayman/lcp
|
python
|
## Code Before:
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
## Instruction:
Add missing env variables for testing.
## Code After:
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'conductor.settings.development')
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
...
if 'test' in sys.argv:
# For now, fake setting the environment for testing.
os.environ['DJANGO_SETTINGS_MODULE'] = 'conductor.settings.test'
os.environ['CORS_ORIGIN_WHITELIST'] = 'localhost:4200'
os.environ['SECRET_KEY'] = 'asecrettoeverybody'
os.environ['STATIC_URL'] = '/static/'
from django.core.management import execute_from_command_line
...
|
edfbeabb802e64527094d46680f994a44ed7f0bd
|
froide_campaign/providers/amenity_local.py
|
froide_campaign/providers/amenity_local.py
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
Select popular pbs first instead of only closest
|
Select popular pbs first instead of only closest
|
Python
|
mit
|
okfde/froide-campaign,okfde/froide-campaign,okfde/froide-campaign
|
python
|
## Code Before:
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
## Instruction:
Select popular pbs first instead of only closest
## Code After:
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
...
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
...
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
...
|
8f1e899485677eb8accfdc999fbd1e7e12187302
|
Classes/WeakUniqueCollection.h
|
Classes/WeakUniqueCollection.h
|
//
// WeakUniqueCollection.h
// book-shelf
//
// Created by Artem Gladkov on 28.06.16.
// Copyright © 2016 Sibext Ltd. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface WeakUniqueCollection<ObjectType> : NSObject
@property(readonly)NSUInteger count;
- (void)addObject:(ObjectType)object;
- (void)removeObject:(ObjectType)object;
- (void)removeAllObjects;
- (ObjectType)anyObject;
- (NSArray <ObjectType> *)allObjects;
- (BOOL)member:(ObjectType)object;
@end
|
//
// WeakUniqueCollection.h
// book-shelf
//
// Created by Artem Gladkov on 28.06.16.
// Copyright © 2016 Sibext Ltd. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
WeakUniqueCollection keeps weak references to the objects and maintains uniqueness.
It's public API is fully thread safe.
WeakUniqueCollection is not optimized for working with large amount of objects.
*/
@interface WeakUniqueCollection<ObjectType> : NSObject
@property(readonly)NSUInteger count;
/**
Adds object to the collection
@param object ObjectType to be added to the collection
*/
- (void)addObject:(ObjectType)object;
/**
Removes object from the collection (if collection contains it).
@param object ObjectType to be removed from the collection
*/
- (void)removeObject:(ObjectType)object;
/**
Removes all objects from the collection.
*/
- (void)removeAllObjects;
/**
Returns any object from the collection.
@return ObjectType or nil (if the collection is empty).
*/
- (nullable ObjectType)anyObject;
/**
Returns array with all objects from the collection.
@return NSArray with objects (cound be empty if the collection is empty).
*/
- (NSArray <ObjectType> *)allObjects;
/**
Determines if the object is already contained in the collection.
@param object ObjectType to be verified
@return YES if object is in the collection
NO if object is not in the collection
*/
- (BOOL)member:(ObjectType)object;
@end
NS_ASSUME_NONNULL_END
|
Add documentation for public API and nullability specification.
|
Add documentation for public API and nullability specification.
|
C
|
mit
|
sibext/WeakUniqueCollection,sibext/WeakUniqueCollection
|
c
|
## Code Before:
//
// WeakUniqueCollection.h
// book-shelf
//
// Created by Artem Gladkov on 28.06.16.
// Copyright © 2016 Sibext Ltd. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface WeakUniqueCollection<ObjectType> : NSObject
@property(readonly)NSUInteger count;
- (void)addObject:(ObjectType)object;
- (void)removeObject:(ObjectType)object;
- (void)removeAllObjects;
- (ObjectType)anyObject;
- (NSArray <ObjectType> *)allObjects;
- (BOOL)member:(ObjectType)object;
@end
## Instruction:
Add documentation for public API and nullability specification.
## Code After:
//
// WeakUniqueCollection.h
// book-shelf
//
// Created by Artem Gladkov on 28.06.16.
// Copyright © 2016 Sibext Ltd. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
WeakUniqueCollection keeps weak references to the objects and maintains uniqueness.
It's public API is fully thread safe.
WeakUniqueCollection is not optimized for working with large amount of objects.
*/
@interface WeakUniqueCollection<ObjectType> : NSObject
@property(readonly)NSUInteger count;
/**
Adds object to the collection
@param object ObjectType to be added to the collection
*/
- (void)addObject:(ObjectType)object;
/**
Removes object from the collection (if collection contains it).
@param object ObjectType to be removed from the collection
*/
- (void)removeObject:(ObjectType)object;
/**
Removes all objects from the collection.
*/
- (void)removeAllObjects;
/**
Returns any object from the collection.
@return ObjectType or nil (if the collection is empty).
*/
- (nullable ObjectType)anyObject;
/**
Returns array with all objects from the collection.
@return NSArray with objects (cound be empty if the collection is empty).
*/
- (NSArray <ObjectType> *)allObjects;
/**
Determines if the object is already contained in the collection.
@param object ObjectType to be verified
@return YES if object is in the collection
NO if object is not in the collection
*/
- (BOOL)member:(ObjectType)object;
@end
NS_ASSUME_NONNULL_END
|
// ... existing code ...
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
WeakUniqueCollection keeps weak references to the objects and maintains uniqueness.
It's public API is fully thread safe.
WeakUniqueCollection is not optimized for working with large amount of objects.
*/
@interface WeakUniqueCollection<ObjectType> : NSObject
@property(readonly)NSUInteger count;
/**
Adds object to the collection
@param object ObjectType to be added to the collection
*/
- (void)addObject:(ObjectType)object;
/**
Removes object from the collection (if collection contains it).
@param object ObjectType to be removed from the collection
*/
- (void)removeObject:(ObjectType)object;
/**
Removes all objects from the collection.
*/
- (void)removeAllObjects;
/**
Returns any object from the collection.
@return ObjectType or nil (if the collection is empty).
*/
- (nullable ObjectType)anyObject;
/**
Returns array with all objects from the collection.
@return NSArray with objects (cound be empty if the collection is empty).
*/
- (NSArray <ObjectType> *)allObjects;
/**
Determines if the object is already contained in the collection.
@param object ObjectType to be verified
@return YES if object is in the collection
NO if object is not in the collection
*/
- (BOOL)member:(ObjectType)object;
@end
NS_ASSUME_NONNULL_END
// ... rest of the code ...
|
e63a914457fc10d895eb776a164939da3ddd9464
|
waftools/gogobject.py
|
waftools/gogobject.py
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg]
return task
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg, node.parent.find_node('config.json')]
return task
|
Use config.json as a go-gobject-gen dependency as well.
|
Use config.json as a go-gobject-gen dependency as well.
|
Python
|
mit
|
nsf/gogobject,nsf/gogobject,nsf/gogobject,nsf/gogobject
|
python
|
## Code Before:
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg]
return task
## Instruction:
Use config.json as a go-gobject-gen dependency as well.
## Code After:
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg, node.parent.find_node('config.json')]
return task
|
// ... existing code ...
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg, node.parent.find_node('config.json')]
return task
// ... rest of the code ...
|
849a4e5daf2eb845213ea76179d7a8143148f39a
|
lib/mixins.py
|
lib/mixins.py
|
class Countable(object):
@classmethod
def count(cls, options={}):
return int(cls.get("count", **options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
|
class Countable(object):
@classmethod
def count(cls, _options=None, **kwargs):
if _options is None:
_options = kwargs
return int(cls.get("count", **_options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
|
Allow count method to be used the same way as find.
|
Allow count method to be used the same way as find.
|
Python
|
mit
|
varesa/shopify_python_api,metric-collective/shopify_python_api,gavinballard/shopify_python_api,asiviero/shopify_python_api,ifnull/shopify_python_api,Shopify/shopify_python_api,SmileyJames/shopify_python_api
|
python
|
## Code Before:
class Countable(object):
@classmethod
def count(cls, options={}):
return int(cls.get("count", **options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
## Instruction:
Allow count method to be used the same way as find.
## Code After:
class Countable(object):
@classmethod
def count(cls, _options=None, **kwargs):
if _options is None:
_options = kwargs
return int(cls.get("count", **_options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
|
...
class Countable(object):
@classmethod
def count(cls, _options=None, **kwargs):
if _options is None:
_options = kwargs
return int(cls.get("count", **_options))
class Metafields(object):
...
|
cc6c40b64f8dfde533977883124e22e0fbc80e5c
|
soco/__init__.py
|
soco/__init__.py
|
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <[email protected]>'
__version__ = '0.7'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import discover, SoCo, SonosDiscovery
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['discover', 'SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'The SoCo-Team <[email protected]>'
__version__ = '0.7'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import discover, SoCo, SonosDiscovery
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['discover', 'SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Update author info to "The SoCo-Team"
|
Update author info to "The SoCo-Team"
|
Python
|
mit
|
TrondKjeldas/SoCo,flavio/SoCo,dundeemt/SoCo,xxdede/SoCo,KennethNielsen/SoCo,petteraas/SoCo,bwhaley/SoCo,xxdede/SoCo,oyvindmal/SocoWebService,TrondKjeldas/SoCo,TrondKjeldas/SoCo,petteraas/SoCo,dajobe/SoCo,intfrr/SoCo,intfrr/SoCo,xxdede/SoCo,fgend31/SoCo,jlmcgehee21/SoCo,DPH/SoCo,dsully/SoCo,meska/SoCo,bwhaley/SoCo,dajobe/SoCo,SoCo/SoCo,flavio/SoCo,lawrenceakka/SoCo,SoCo/SoCo,lawrenceakka/SoCo,KennethNielsen/SoCo,bwhaley/SoCo,fxstein/SoCo,petteraas/SoCo,fgend31/SoCo,jlmcgehee21/SoCo,fxstein/SoCo,simonalpha/SoCo,DPH/SoCo,oyvindmal/SocoWebService,simonalpha/SoCo,meska/SoCo,dundeemt/SoCo,dsully/SoCo
|
python
|
## Code Before:
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <[email protected]>'
__version__ = '0.7'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import discover, SoCo, SonosDiscovery
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['discover', 'SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
## Instruction:
Update author info to "The SoCo-Team"
## Code After:
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'The SoCo-Team <[email protected]>'
__version__ = '0.7'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import discover, SoCo, SonosDiscovery
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['discover', 'SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
// ... existing code ...
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'The SoCo-Team <[email protected]>'
__version__ = '0.7'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
// ... rest of the code ...
|
f7e496aa7ee59f84d13d19989460ae08ec803251
|
zuul-core/src/main/java/com/netflix/zuul/util/ProxyUtils.java
|
zuul-core/src/main/java/com/netflix/zuul/util/ProxyUtils.java
|
package com.netflix.zuul.util;
import com.netflix.client.http.HttpResponse;
import com.netflix.zuul.context.HttpRequestMessage;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* User: [email protected]
* Date: 6/8/15
* Time: 11:50 AM
*/
public class ProxyUtils
{
public static boolean isValidRequestHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "transfer-encoding":
return false;
default:
return true;
}
}
public static boolean isValidResponseHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "server":
case "transfer-encoding":
return false;
default:
return true;
}
}
@RunWith(MockitoJUnitRunner.class)
public static class TestUnit
{
@Mock
HttpResponse proxyResp;
@Mock
HttpRequestMessage request;
@Test
public void testIsValidResponseHeader()
{
Assert.assertTrue(isValidResponseHeader("test"));
Assert.assertFalse(isValidResponseHeader("content-length"));
Assert.assertFalse(isValidResponseHeader("connection"));
}
}
}
|
package com.netflix.zuul.util;
import com.netflix.client.http.HttpResponse;
import com.netflix.zuul.context.HttpRequestMessage;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* User: [email protected]
* Date: 6/8/15
* Time: 11:50 AM
*/
public class ProxyUtils
{
public static boolean isValidRequestHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "transfer-encoding":
return false;
default:
return true;
}
}
public static boolean isValidResponseHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "keep-alive":
case "content-length":
case "server":
case "transfer-encoding":
return false;
default:
return true;
}
}
@RunWith(MockitoJUnitRunner.class)
public static class TestUnit
{
@Mock
HttpResponse proxyResp;
@Mock
HttpRequestMessage request;
@Test
public void testIsValidResponseHeader()
{
Assert.assertTrue(isValidResponseHeader("test"));
Assert.assertFalse(isValidResponseHeader("content-length"));
Assert.assertFalse(isValidResponseHeader("connection"));
}
}
}
|
Exclude 'keep-alive' as a proxied response header.
|
Exclude 'keep-alive' as a proxied response header.
|
Java
|
apache-2.0
|
Netflix/zuul,NiteshKant/zuul,NiteshKant/zuul
|
java
|
## Code Before:
package com.netflix.zuul.util;
import com.netflix.client.http.HttpResponse;
import com.netflix.zuul.context.HttpRequestMessage;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* User: [email protected]
* Date: 6/8/15
* Time: 11:50 AM
*/
public class ProxyUtils
{
public static boolean isValidRequestHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "transfer-encoding":
return false;
default:
return true;
}
}
public static boolean isValidResponseHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "server":
case "transfer-encoding":
return false;
default:
return true;
}
}
@RunWith(MockitoJUnitRunner.class)
public static class TestUnit
{
@Mock
HttpResponse proxyResp;
@Mock
HttpRequestMessage request;
@Test
public void testIsValidResponseHeader()
{
Assert.assertTrue(isValidResponseHeader("test"));
Assert.assertFalse(isValidResponseHeader("content-length"));
Assert.assertFalse(isValidResponseHeader("connection"));
}
}
}
## Instruction:
Exclude 'keep-alive' as a proxied response header.
## Code After:
package com.netflix.zuul.util;
import com.netflix.client.http.HttpResponse;
import com.netflix.zuul.context.HttpRequestMessage;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* User: [email protected]
* Date: 6/8/15
* Time: 11:50 AM
*/
public class ProxyUtils
{
public static boolean isValidRequestHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "content-length":
case "transfer-encoding":
return false;
default:
return true;
}
}
public static boolean isValidResponseHeader(String headerName)
{
switch (headerName.toLowerCase()) {
case "connection":
case "keep-alive":
case "content-length":
case "server":
case "transfer-encoding":
return false;
default:
return true;
}
}
@RunWith(MockitoJUnitRunner.class)
public static class TestUnit
{
@Mock
HttpResponse proxyResp;
@Mock
HttpRequestMessage request;
@Test
public void testIsValidResponseHeader()
{
Assert.assertTrue(isValidResponseHeader("test"));
Assert.assertFalse(isValidResponseHeader("content-length"));
Assert.assertFalse(isValidResponseHeader("connection"));
}
}
}
|
// ... existing code ...
{
switch (headerName.toLowerCase()) {
case "connection":
case "keep-alive":
case "content-length":
case "server":
case "transfer-encoding":
// ... rest of the code ...
|
91d104a25db499ccef54878dcbfce42dbb4aa932
|
taskin/task.py
|
taskin/task.py
|
import abc
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class MapTask(object):
def __init__(self, args, task):
self.args = args
self.task = task
self.pool = Pool(cpu_count())
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
Add totally untested pools ;)
|
Add totally untested pools ;)
|
Python
|
bsd-3-clause
|
ionrock/taskin
|
python
|
## Code Before:
import abc
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class MapTask(object):
def __init__(self, args, task):
self.args = args
self.task = task
self.pool = Pool(cpu_count())
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
## Instruction:
Add totally untested pools ;)
## Code After:
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
# ... existing code ...
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
# ... modified code ...
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
...
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
# ... rest of the code ...
|
5060ef938f4f0cb880f288235391dc0c08be56c6
|
src/main/kotlin/com/github/shiraji/findpullrequest/FindPullRequestAction.kt
|
src/main/kotlin/com/github/shiraji/findpullrequest/FindPullRequestAction.kt
|
package com.github.shiraji.findpullrequest
import com.intellij.notification.Notification
import com.intellij.notification.NotificationType
import com.intellij.notification.Notifications
import com.intellij.openapi.actionSystem.AnAction
import com.intellij.openapi.actionSystem.AnActionEvent
import com.intellij.openapi.actionSystem.CommonDataKeys
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import git4idea.repo.GitRepository
import org.jetbrains.plugins.github.util.GithubUtil
class FindPullRequestAction : AnAction() {
override fun actionPerformed(e: AnActionEvent) {
val eventData = calcData(e)
Notifications.Bus.notify(Notification("Plugin Importer+Exporter",
"Plugin Importer+Exporter",
"EventData Repo br: " + eventData?.repository?.branches
+ " " + eventData?.repository?.remotes,
NotificationType.INFORMATION))
}
private fun calcData(e : AnActionEvent): EventData? {
val project = e.getData(CommonDataKeys.PROJECT)
project ?: return null
val virtualFile = e.getData(CommonDataKeys.VIRTUAL_FILE)
virtualFile ?: return null
val document = FileDocumentManager.getInstance().getDocument(virtualFile)
document ?: return null
val repository = GithubUtil.getGitRepository(project, virtualFile)
repository ?: return null
return EventData(project, repository)
}
private data class EventData(val project: Project, val repository: GitRepository) {
}
}
|
package com.github.shiraji.findpullrequest
import com.intellij.notification.Notification
import com.intellij.notification.NotificationType
import com.intellij.notification.Notifications
import com.intellij.openapi.actionSystem.AnAction
import com.intellij.openapi.actionSystem.AnActionEvent
import com.intellij.openapi.actionSystem.CommonDataKeys
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import git4idea.repo.GitRepository
import org.jetbrains.plugins.github.util.GithubUtil
class FindPullRequestAction : AnAction() {
override fun actionPerformed(e: AnActionEvent) {
val eventData = calcData(e)
val foo = eventData?.repository?.remotes?.joinToString {
it.pushUrls.toString() + "\n"
}
Notifications.Bus.notify(Notification("Plugin Importer+Exporter",
"Plugin Importer+Exporter",
"EventData: " + foo,
NotificationType.INFORMATION))
}
private fun calcData(e : AnActionEvent): EventData? {
val project = e.getData(CommonDataKeys.PROJECT)
project ?: return null
val virtualFile = e.getData(CommonDataKeys.VIRTUAL_FILE)
virtualFile ?: return null
val document = FileDocumentManager.getInstance().getDocument(virtualFile)
document ?: return null
val repository = GithubUtil.getGitRepository(project, virtualFile)
repository ?: return null
return EventData(project, repository)
}
private data class EventData(val project: Project, val repository: GitRepository) {
}
}
|
Add debug info. It may not work since there is no runtime dependancy
|
Add debug info. It may not work since there is no runtime dependancy
|
Kotlin
|
apache-2.0
|
shiraji/find-pull-request
|
kotlin
|
## Code Before:
package com.github.shiraji.findpullrequest
import com.intellij.notification.Notification
import com.intellij.notification.NotificationType
import com.intellij.notification.Notifications
import com.intellij.openapi.actionSystem.AnAction
import com.intellij.openapi.actionSystem.AnActionEvent
import com.intellij.openapi.actionSystem.CommonDataKeys
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import git4idea.repo.GitRepository
import org.jetbrains.plugins.github.util.GithubUtil
class FindPullRequestAction : AnAction() {
override fun actionPerformed(e: AnActionEvent) {
val eventData = calcData(e)
Notifications.Bus.notify(Notification("Plugin Importer+Exporter",
"Plugin Importer+Exporter",
"EventData Repo br: " + eventData?.repository?.branches
+ " " + eventData?.repository?.remotes,
NotificationType.INFORMATION))
}
private fun calcData(e : AnActionEvent): EventData? {
val project = e.getData(CommonDataKeys.PROJECT)
project ?: return null
val virtualFile = e.getData(CommonDataKeys.VIRTUAL_FILE)
virtualFile ?: return null
val document = FileDocumentManager.getInstance().getDocument(virtualFile)
document ?: return null
val repository = GithubUtil.getGitRepository(project, virtualFile)
repository ?: return null
return EventData(project, repository)
}
private data class EventData(val project: Project, val repository: GitRepository) {
}
}
## Instruction:
Add debug info. It may not work since there is no runtime dependancy
## Code After:
package com.github.shiraji.findpullrequest
import com.intellij.notification.Notification
import com.intellij.notification.NotificationType
import com.intellij.notification.Notifications
import com.intellij.openapi.actionSystem.AnAction
import com.intellij.openapi.actionSystem.AnActionEvent
import com.intellij.openapi.actionSystem.CommonDataKeys
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import git4idea.repo.GitRepository
import org.jetbrains.plugins.github.util.GithubUtil
class FindPullRequestAction : AnAction() {
override fun actionPerformed(e: AnActionEvent) {
val eventData = calcData(e)
val foo = eventData?.repository?.remotes?.joinToString {
it.pushUrls.toString() + "\n"
}
Notifications.Bus.notify(Notification("Plugin Importer+Exporter",
"Plugin Importer+Exporter",
"EventData: " + foo,
NotificationType.INFORMATION))
}
private fun calcData(e : AnActionEvent): EventData? {
val project = e.getData(CommonDataKeys.PROJECT)
project ?: return null
val virtualFile = e.getData(CommonDataKeys.VIRTUAL_FILE)
virtualFile ?: return null
val document = FileDocumentManager.getInstance().getDocument(virtualFile)
document ?: return null
val repository = GithubUtil.getGitRepository(project, virtualFile)
repository ?: return null
return EventData(project, repository)
}
private data class EventData(val project: Project, val repository: GitRepository) {
}
}
|
// ... existing code ...
override fun actionPerformed(e: AnActionEvent) {
val eventData = calcData(e)
val foo = eventData?.repository?.remotes?.joinToString {
it.pushUrls.toString() + "\n"
}
Notifications.Bus.notify(Notification("Plugin Importer+Exporter",
"Plugin Importer+Exporter",
"EventData: " + foo,
NotificationType.INFORMATION))
}
// ... rest of the code ...
|
1cb79216f992ea0f31abb28031a74f6e703582cb
|
YouKnowShit/DownloadPic.py
|
YouKnowShit/DownloadPic.py
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
Update the pic download base url.
|
Update the pic download base url.
|
Python
|
mit
|
jiangtianyu2009/PiSoftCake
|
python
|
## Code Before:
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
## Instruction:
Update the pic download base url.
## Code After:
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
...
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
...
|
55bc355fc97eb5e034e86e7c55919d8cca0edb2b
|
feincms/context_processors.py
|
feincms/context_processors.py
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
Remove deprecated appcontent_parameters context processor
|
Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.
|
Python
|
bsd-3-clause
|
matthiask/feincms2-content,mjl/feincms,feincms/feincms,joshuajonah/feincms,matthiask/feincms2-content,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,pjdelport/feincms,nickburlett/feincms,michaelkuty/feincms,michaelkuty/feincms,feincms/feincms,feincms/feincms,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,pjdelport/feincms,nickburlett/feincms,nickburlett/feincms,joshuajonah/feincms,michaelkuty/feincms,matthiask/django-content-editor,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor
|
python
|
## Code Before:
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
## Instruction:
Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.
## Code After:
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
...
}
except Page.DoesNotExist:
return {}
...
|
43edc7a519cb2e7c49a112f816c5192908ac7e6b
|
tests/test_validator.py
|
tests/test_validator.py
|
import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
|
import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
def test_contains_form(self, loaded_request):
"""
Test that the validator contains a form on each of three tabs.
"""
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="status"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="fileTab"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="extra"]/div/form')) == 1
|
Check forms on validator page
|
Check forms on validator page
Add a test to check that each of the three forms exist on the
validator page.
This test does not check whether the three forms work correctly.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
python
|
## Code Before:
import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
## Instruction:
Check forms on validator page
Add a test to check that each of the three forms exist on the
validator page.
This test does not check whether the three forms work correctly.
## Code After:
import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
def test_contains_form(self, loaded_request):
"""
Test that the validator contains a form on each of three tabs.
"""
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="status"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="fileTab"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="extra"]/div/form')) == 1
|
// ... existing code ...
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
def test_contains_form(self, loaded_request):
"""
Test that the validator contains a form on each of three tabs.
"""
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="status"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="fileTab"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="extra"]/div/form')) == 1
// ... rest of the code ...
|
794596fd6f55806eecca1c54e155533590108eee
|
openspending/lib/unicode_dict_reader.py
|
openspending/lib/unicode_dict_reader.py
|
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, fp, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(fp, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
|
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
|
Python
|
agpl-3.0
|
CivicVision/datahub,nathanhilbert/FPA_Core,USStateDept/FPA_Core,johnjohndoe/spendb,openspending/spendb,spendb/spendb,CivicVision/datahub,pudo/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,pudo/spendb,openspending/spendb,johnjohndoe/spendb,spendb/spendb,nathanhilbert/FPA_Core,openspending/spendb,johnjohndoe/spendb,spendb/spendb,USStateDept/FPA_Core,pudo/spendb,USStateDept/FPA_Core
|
python
|
## Code Before:
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
## Instruction:
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
## Code After:
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, fp, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(fp, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
# ... existing code ...
class UnicodeDictReader(object):
def __init__(self, fp, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(fp, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
# ... rest of the code ...
|
fbbc42fd0c023f6f5f603f9dfcc961d87ca6d645
|
zou/app/blueprints/crud/custom_action.py
|
zou/app/blueprints/crud/custom_action.py
|
from zou.app.models.custom_action import CustomAction
from .base import BaseModelsResource, BaseModelResource
class CustomActionsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, CustomAction)
class CustomActionResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, CustomAction)
|
from zou.app.models.custom_action import CustomAction
from .base import BaseModelsResource, BaseModelResource
class CustomActionsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, CustomAction)
def check_permissions(self):
return True
class CustomActionResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, CustomAction)
|
Allow anyone to read custom actions
|
Allow anyone to read custom actions
|
Python
|
agpl-3.0
|
cgwire/zou
|
python
|
## Code Before:
from zou.app.models.custom_action import CustomAction
from .base import BaseModelsResource, BaseModelResource
class CustomActionsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, CustomAction)
class CustomActionResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, CustomAction)
## Instruction:
Allow anyone to read custom actions
## Code After:
from zou.app.models.custom_action import CustomAction
from .base import BaseModelsResource, BaseModelResource
class CustomActionsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, CustomAction)
def check_permissions(self):
return True
class CustomActionResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, CustomAction)
|
// ... existing code ...
def __init__(self):
BaseModelsResource.__init__(self, CustomAction)
def check_permissions(self):
return True
class CustomActionResource(BaseModelResource):
// ... rest of the code ...
|
addd55dcf27bda9e3f6cfe4301c067276fa67161
|
plugins/ua_debug_dump_pkgs.c
|
plugins/ua_debug_dump_pkgs.c
|
/* This work is licensed under a Creative Commons CCZero 1.0 Universal License.
* See http://creativecommons.org/publicdomain/zero/1.0/ for more information. */
#include "ua_util.h"
#include <ctype.h>
#include <stdio.h>
void UA_dump_hex_pkg(UA_Byte* buffer, size_t bufferLen) {
printf("--------------- HEX Package Start ---------------\n");
char ascii[17];
memset(ascii,0,17);
for (size_t i = 0; i < bufferLen; i++)
{
if (i == 0)
printf("%08zx ", i);
else if (i%16 == 0)
printf("|%s|\n%08zx ", ascii, i);
if (isprint((int)(buffer[i])))
ascii[i%16] = (char)buffer[i];
else
ascii[i%16] = '.';
printf("%02X ", (unsigned char)buffer[i]);
}
size_t fillPos = bufferLen %16;
ascii[fillPos] = 0;
for (size_t i=fillPos; i<16; i++) {
printf(" ");
}
printf("|%s|\n%08zx\n", ascii, bufferLen);
printf("--------------- HEX Package END ---------------\n");
}
|
/* This work is licensed under a Creative Commons CCZero 1.0 Universal License.
* See http://creativecommons.org/publicdomain/zero/1.0/ for more information. */
#include "ua_util.h"
#include <ctype.h>
#include <stdio.h>
void UA_dump_hex_pkg(UA_Byte* buffer, size_t bufferLen) {
printf("--------------- HEX Package Start ---------------\n");
char ascii[17];
memset(ascii,0,17);
for (size_t i = 0; i < bufferLen; i++)
{
if (i == 0)
printf("%08zx ", i);
else if (i%16 == 0)
printf(" |%s|\n%08zx ", ascii, i);
if (isprint((int)(buffer[i])))
ascii[i%16] = (char)buffer[i];
else
ascii[i%16] = '.';
if (i%8==0)
printf(" ");
printf("%02X ", (unsigned char)buffer[i]);
}
size_t fillPos = bufferLen %16;
ascii[fillPos] = 0;
for (size_t i=fillPos; i<16; i++) {
if (i%8==0)
printf(" ");
printf(" ");
}
printf(" |%s|\n%08zx\n", ascii, bufferLen);
printf("--------------- HEX Package END ---------------\n");
}
|
Align format with `hexdump -C`
|
Align format with `hexdump -C`
|
C
|
mpl-2.0
|
JGrothoff/open62541,open62541/open62541,JGrothoff/open62541,JGrothoff/open62541,jpfr/open62541,jpfr/open62541,open62541/open62541,open62541/open62541,StalderT/open62541,jpfr/open62541,JGrothoff/open62541,StalderT/open62541,StalderT/open62541,StalderT/open62541,open62541/open62541,jpfr/open62541
|
c
|
## Code Before:
/* This work is licensed under a Creative Commons CCZero 1.0 Universal License.
* See http://creativecommons.org/publicdomain/zero/1.0/ for more information. */
#include "ua_util.h"
#include <ctype.h>
#include <stdio.h>
void UA_dump_hex_pkg(UA_Byte* buffer, size_t bufferLen) {
printf("--------------- HEX Package Start ---------------\n");
char ascii[17];
memset(ascii,0,17);
for (size_t i = 0; i < bufferLen; i++)
{
if (i == 0)
printf("%08zx ", i);
else if (i%16 == 0)
printf("|%s|\n%08zx ", ascii, i);
if (isprint((int)(buffer[i])))
ascii[i%16] = (char)buffer[i];
else
ascii[i%16] = '.';
printf("%02X ", (unsigned char)buffer[i]);
}
size_t fillPos = bufferLen %16;
ascii[fillPos] = 0;
for (size_t i=fillPos; i<16; i++) {
printf(" ");
}
printf("|%s|\n%08zx\n", ascii, bufferLen);
printf("--------------- HEX Package END ---------------\n");
}
## Instruction:
Align format with `hexdump -C`
## Code After:
/* This work is licensed under a Creative Commons CCZero 1.0 Universal License.
* See http://creativecommons.org/publicdomain/zero/1.0/ for more information. */
#include "ua_util.h"
#include <ctype.h>
#include <stdio.h>
void UA_dump_hex_pkg(UA_Byte* buffer, size_t bufferLen) {
printf("--------------- HEX Package Start ---------------\n");
char ascii[17];
memset(ascii,0,17);
for (size_t i = 0; i < bufferLen; i++)
{
if (i == 0)
printf("%08zx ", i);
else if (i%16 == 0)
printf(" |%s|\n%08zx ", ascii, i);
if (isprint((int)(buffer[i])))
ascii[i%16] = (char)buffer[i];
else
ascii[i%16] = '.';
if (i%8==0)
printf(" ");
printf("%02X ", (unsigned char)buffer[i]);
}
size_t fillPos = bufferLen %16;
ascii[fillPos] = 0;
for (size_t i=fillPos; i<16; i++) {
if (i%8==0)
printf(" ");
printf(" ");
}
printf(" |%s|\n%08zx\n", ascii, bufferLen);
printf("--------------- HEX Package END ---------------\n");
}
|
...
if (i == 0)
printf("%08zx ", i);
else if (i%16 == 0)
printf(" |%s|\n%08zx ", ascii, i);
if (isprint((int)(buffer[i])))
ascii[i%16] = (char)buffer[i];
else
ascii[i%16] = '.';
if (i%8==0)
printf(" ");
printf("%02X ", (unsigned char)buffer[i]);
}
size_t fillPos = bufferLen %16;
ascii[fillPos] = 0;
for (size_t i=fillPos; i<16; i++) {
if (i%8==0)
printf(" ");
printf(" ");
}
printf(" |%s|\n%08zx\n", ascii, bufferLen);
printf("--------------- HEX Package END ---------------\n");
}
...
|
c416c998d73e27713fd57ec97c70bacb2390f8c9
|
DashDoc.py
|
DashDoc.py
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
Use Dash's new CamelCase convention to lookup words that contain whitespace
|
Use Dash's new CamelCase convention to lookup words that contain whitespace
- Example: converting "create table" into "createTable" will lookup "CREATE TABLE"
|
Python
|
apache-2.0
|
farcaller/DashDoc
|
python
|
## Code Before:
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
## Instruction:
Use Dash's new CamelCase convention to lookup words that contain whitespace
- Example: converting "create table" into "createTable" will lookup "CREATE TABLE"
## Code After:
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
...
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
...
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
...
|
22465e0ae238a6584a8549796f4dfbae21db73dc
|
ooni/tests/test_geoip.py
|
ooni/tests/test_geoip.py
|
import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
|
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
Add unittests for geoip database version
|
Add unittests for geoip database version
|
Python
|
bsd-2-clause
|
juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
|
python
|
## Code Before:
import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
## Instruction:
Add unittests for geoip database version
## Code After:
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
// ... existing code ...
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
// ... modified code ...
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
// ... rest of the code ...
|
9b86f2f27bdc1116be1b388e0e66c34b10006ba6
|
Plugins/org.mitk.gui.qt.multilabelsegmentation/src/internal/QmitkCreateMultiLabelSegmentationAction.h
|
Plugins/org.mitk.gui.qt.multilabelsegmentation/src/internal/QmitkCreateMultiLabelSegmentationAction.h
|
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#ifndef QMITK_CreateMultiLabelSegmentation_H
#define QMITK_CreateMultiLabelSegmentation_H
#include "mitkIContextMenuAction.h"
#include "org_mitk_gui_qt_multilabelsegmentation_Export.h"
#include "vector"
#include "mitkDataNode.h"
class MITK_QT_SEGMENTATION QmitkCreateMultiLabelSegmentationAction : public QObject, public mitk::IContextMenuAction
{
Q_OBJECT
Q_INTERFACES(mitk::IContextMenuAction)
public:
QmitkCreateMultiLabelSegmentationAction();
virtual ~QmitkCreateMultiLabelSegmentationAction();
//interface methods
virtual void Run( const QList<mitk::DataNode::Pointer>& selectedNodes );
virtual void SetDataStorage(mitk::DataStorage* dataStorage);
virtual void SetFunctionality(berry::QtViewPart* functionality);
virtual void SetSmoothed(bool smoothed);
virtual void SetDecimated(bool decimated);
private:
typedef QList<mitk::DataNode::Pointer> NodeList;
mitk::DataStorage::Pointer m_DataStorage;
};
#endif // QMITK_CreateMultiLabelSegmentation_H
|
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#ifndef QMITK_CreateMultiLabelSegmentation_H
#define QMITK_CreateMultiLabelSegmentation_H
#include "mitkIContextMenuAction.h"
#include "org_mitk_gui_qt_multilabelsegmentation_Export.h"
#include "vector"
#include "mitkDataNode.h"
class MITK_QT_SEGMENTATION QmitkCreateMultiLabelSegmentationAction : public QObject, public mitk::IContextMenuAction
{
Q_OBJECT
Q_INTERFACES(mitk::IContextMenuAction)
public:
QmitkCreateMultiLabelSegmentationAction();
virtual ~QmitkCreateMultiLabelSegmentationAction();
//interface methods
virtual void Run( const QList<mitk::DataNode::Pointer>& selectedNodes ) override;
virtual void SetDataStorage(mitk::DataStorage* dataStorage) override;
virtual void SetFunctionality(berry::QtViewPart* functionality) override;
virtual void SetSmoothed(bool smoothed) override;
virtual void SetDecimated(bool decimated) override;
private:
typedef QList<mitk::DataNode::Pointer> NodeList;
mitk::DataStorage::Pointer m_DataStorage;
};
#endif // QMITK_CreateMultiLabelSegmentation_H
|
Add override keyword to overridden methods.
|
Add override keyword to overridden methods.
|
C
|
bsd-3-clause
|
iwegner/MITK,iwegner/MITK,fmilano/mitk,RabadanLab/MITKats,RabadanLab/MITKats,MITK/MITK,MITK/MITK,MITK/MITK,RabadanLab/MITKats,RabadanLab/MITKats,RabadanLab/MITKats,iwegner/MITK,fmilano/mitk,RabadanLab/MITKats,fmilano/mitk,fmilano/mitk,MITK/MITK,MITK/MITK,iwegner/MITK,iwegner/MITK,fmilano/mitk,fmilano/mitk,MITK/MITK,fmilano/mitk,iwegner/MITK
|
c
|
## Code Before:
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#ifndef QMITK_CreateMultiLabelSegmentation_H
#define QMITK_CreateMultiLabelSegmentation_H
#include "mitkIContextMenuAction.h"
#include "org_mitk_gui_qt_multilabelsegmentation_Export.h"
#include "vector"
#include "mitkDataNode.h"
class MITK_QT_SEGMENTATION QmitkCreateMultiLabelSegmentationAction : public QObject, public mitk::IContextMenuAction
{
Q_OBJECT
Q_INTERFACES(mitk::IContextMenuAction)
public:
QmitkCreateMultiLabelSegmentationAction();
virtual ~QmitkCreateMultiLabelSegmentationAction();
//interface methods
virtual void Run( const QList<mitk::DataNode::Pointer>& selectedNodes );
virtual void SetDataStorage(mitk::DataStorage* dataStorage);
virtual void SetFunctionality(berry::QtViewPart* functionality);
virtual void SetSmoothed(bool smoothed);
virtual void SetDecimated(bool decimated);
private:
typedef QList<mitk::DataNode::Pointer> NodeList;
mitk::DataStorage::Pointer m_DataStorage;
};
#endif // QMITK_CreateMultiLabelSegmentation_H
## Instruction:
Add override keyword to overridden methods.
## Code After:
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#ifndef QMITK_CreateMultiLabelSegmentation_H
#define QMITK_CreateMultiLabelSegmentation_H
#include "mitkIContextMenuAction.h"
#include "org_mitk_gui_qt_multilabelsegmentation_Export.h"
#include "vector"
#include "mitkDataNode.h"
class MITK_QT_SEGMENTATION QmitkCreateMultiLabelSegmentationAction : public QObject, public mitk::IContextMenuAction
{
Q_OBJECT
Q_INTERFACES(mitk::IContextMenuAction)
public:
QmitkCreateMultiLabelSegmentationAction();
virtual ~QmitkCreateMultiLabelSegmentationAction();
//interface methods
virtual void Run( const QList<mitk::DataNode::Pointer>& selectedNodes ) override;
virtual void SetDataStorage(mitk::DataStorage* dataStorage) override;
virtual void SetFunctionality(berry::QtViewPart* functionality) override;
virtual void SetSmoothed(bool smoothed) override;
virtual void SetDecimated(bool decimated) override;
private:
typedef QList<mitk::DataNode::Pointer> NodeList;
mitk::DataStorage::Pointer m_DataStorage;
};
#endif // QMITK_CreateMultiLabelSegmentation_H
|
# ... existing code ...
virtual ~QmitkCreateMultiLabelSegmentationAction();
//interface methods
virtual void Run( const QList<mitk::DataNode::Pointer>& selectedNodes ) override;
virtual void SetDataStorage(mitk::DataStorage* dataStorage) override;
virtual void SetFunctionality(berry::QtViewPart* functionality) override;
virtual void SetSmoothed(bool smoothed) override;
virtual void SetDecimated(bool decimated) override;
private:
# ... rest of the code ...
|
c231987c532885fa7bc5e8d2afc8b7a30a2ce297
|
bayesian_methods_for_hackers/simulate_messages_ch02.py
|
bayesian_methods_for_hackers/simulate_messages_ch02.py
|
import json
import matplotlib
import numpy as np
import pymc as pm
from matplotlib import pyplot as plt
def main():
matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json'
matplotlib.rcParams.update(json.load(open(matplotlibrc_path)))
tau = pm.rdiscrete_uniform(0, 80)
print tau
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
print lambda_1, lambda_2
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
def plot_artificial_sms_dataset():
tau = pm.rdiscrete_uniform(0, 80)
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
plt.bar(np.arange(80), data, color="#348ABD")
plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed")
plt.xlim(0, 80)
plt.title("More example of artificial datasets")
for i in range(1, 5):
plt.subplot(4, 1, i)
plot_artificial_sms_dataset()
plt.show()
if __name__ == '__main__':
main()
|
import json
import matplotlib
import numpy as np
import pymc as pm
from matplotlib import pyplot as plt
def main():
tau = pm.rdiscrete_uniform(0, 80)
print tau
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
print lambda_1, lambda_2
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
def plot_artificial_sms_dataset():
tau = pm.rdiscrete_uniform(0, 80)
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
plt.bar(np.arange(80), data, color="#348ABD")
plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed")
plt.xlim(0, 80)
plt.title("More example of artificial datasets")
for i in range(1, 5):
plt.subplot(4, 1, i)
plot_artificial_sms_dataset()
plt.show()
if __name__ == '__main__':
main()
|
Change of repo name. Update effected paths
|
Change of repo name. Update effected paths
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
python
|
## Code Before:
import json
import matplotlib
import numpy as np
import pymc as pm
from matplotlib import pyplot as plt
def main():
matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json'
matplotlib.rcParams.update(json.load(open(matplotlibrc_path)))
tau = pm.rdiscrete_uniform(0, 80)
print tau
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
print lambda_1, lambda_2
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
def plot_artificial_sms_dataset():
tau = pm.rdiscrete_uniform(0, 80)
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
plt.bar(np.arange(80), data, color="#348ABD")
plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed")
plt.xlim(0, 80)
plt.title("More example of artificial datasets")
for i in range(1, 5):
plt.subplot(4, 1, i)
plot_artificial_sms_dataset()
plt.show()
if __name__ == '__main__':
main()
## Instruction:
Change of repo name. Update effected paths
## Code After:
import json
import matplotlib
import numpy as np
import pymc as pm
from matplotlib import pyplot as plt
def main():
tau = pm.rdiscrete_uniform(0, 80)
print tau
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
print lambda_1, lambda_2
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
def plot_artificial_sms_dataset():
tau = pm.rdiscrete_uniform(0, 80)
alpha = 1. / 20.
lambda_1, lambda_2 = pm.rexponential(alpha, 2)
data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)]
plt.bar(np.arange(80), data, color="#348ABD")
plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed")
plt.xlim(0, 80)
plt.title("More example of artificial datasets")
for i in range(1, 5):
plt.subplot(4, 1, i)
plot_artificial_sms_dataset()
plt.show()
if __name__ == '__main__':
main()
|
// ... existing code ...
def main():
tau = pm.rdiscrete_uniform(0, 80)
print tau
// ... rest of the code ...
|
49e95022577eb40bcf9e1d1c9f95be7269fd0e3b
|
scripts/update_acq_stats.py
|
scripts/update_acq_stats.py
|
from mica.stats import update_acq_stats
update_acq_stats.main()
import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
Fix reference to acq table file in script
|
Fix reference to acq table file in script
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
python
|
## Code Before:
from mica.stats import update_acq_stats
update_acq_stats.main()
import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
## Instruction:
Fix reference to acq table file in script
## Code After:
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
# ... existing code ...
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
# ... rest of the code ...
|
9e695ef364494687269e156440186a454db66278
|
src/main/java/org/commcare/modern/parse/ParseUtilsHelper.java
|
src/main/java/org/commcare/modern/parse/ParseUtilsHelper.java
|
package org.commcare.modern.parse;
import org.commcare.core.interfaces.UserSandbox;
import org.commcare.core.parse.ParseUtils;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
* Convenience methods, mostly for touchforms so we don't have to deal with Java IO
* in Jython which is terrible
*
* Used by touchforms
*
* Created by wpride1 on 8/20/15.
*/
@SuppressWarnings("unused")
public class ParseUtilsHelper extends ParseUtils {
public static void parseXMLIntoSandbox(String restore, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
InputStream stream = new ByteArrayInputStream(restore.getBytes(StandardCharsets.UTF_8));
parseIntoSandbox(stream, sandbox);
}
public static void parseFileIntoSandbox(File restore, UserSandbox sandbox)
throws IOException, InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException {
InputStream stream = new FileInputStream(restore);
parseIntoSandbox(stream, sandbox);
}
}
|
package org.commcare.modern.parse;
import org.commcare.core.interfaces.UserSandbox;
import org.commcare.core.parse.ParseUtils;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
* Convenience methods, mostly for touchforms so we don't have to deal with Java IO
* in Jython which is terrible
*
* Used by touchforms
*
* Created by wpride1 on 8/20/15.
*/
@SuppressWarnings("unused")
public class ParseUtilsHelper extends ParseUtils {
public static void parseXMLIntoSandbox(InputStream restoreStream, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
parseIntoSandbox(restoreStream, sandbox);
}
public static void parseXMLIntoSandbox(String restore, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
InputStream stream = new ByteArrayInputStream(restore.getBytes(StandardCharsets.UTF_8));
parseIntoSandbox(stream, sandbox);
}
public static void parseFileIntoSandbox(File restore, UserSandbox sandbox)
throws IOException, InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException {
InputStream stream = new FileInputStream(restore);
parseIntoSandbox(stream, sandbox);
}
}
|
Add method to accept stream
|
Add method to accept stream
|
Java
|
apache-2.0
|
dimagi/commcare,dimagi/commcare,dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare-core
|
java
|
## Code Before:
package org.commcare.modern.parse;
import org.commcare.core.interfaces.UserSandbox;
import org.commcare.core.parse.ParseUtils;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
* Convenience methods, mostly for touchforms so we don't have to deal with Java IO
* in Jython which is terrible
*
* Used by touchforms
*
* Created by wpride1 on 8/20/15.
*/
@SuppressWarnings("unused")
public class ParseUtilsHelper extends ParseUtils {
public static void parseXMLIntoSandbox(String restore, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
InputStream stream = new ByteArrayInputStream(restore.getBytes(StandardCharsets.UTF_8));
parseIntoSandbox(stream, sandbox);
}
public static void parseFileIntoSandbox(File restore, UserSandbox sandbox)
throws IOException, InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException {
InputStream stream = new FileInputStream(restore);
parseIntoSandbox(stream, sandbox);
}
}
## Instruction:
Add method to accept stream
## Code After:
package org.commcare.modern.parse;
import org.commcare.core.interfaces.UserSandbox;
import org.commcare.core.parse.ParseUtils;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
* Convenience methods, mostly for touchforms so we don't have to deal with Java IO
* in Jython which is terrible
*
* Used by touchforms
*
* Created by wpride1 on 8/20/15.
*/
@SuppressWarnings("unused")
public class ParseUtilsHelper extends ParseUtils {
public static void parseXMLIntoSandbox(InputStream restoreStream, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
parseIntoSandbox(restoreStream, sandbox);
}
public static void parseXMLIntoSandbox(String restore, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
InputStream stream = new ByteArrayInputStream(restore.getBytes(StandardCharsets.UTF_8));
parseIntoSandbox(stream, sandbox);
}
public static void parseFileIntoSandbox(File restore, UserSandbox sandbox)
throws IOException, InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException {
InputStream stream = new FileInputStream(restore);
parseIntoSandbox(stream, sandbox);
}
}
|
// ... existing code ...
*/
@SuppressWarnings("unused")
public class ParseUtilsHelper extends ParseUtils {
public static void parseXMLIntoSandbox(InputStream restoreStream, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
parseIntoSandbox(restoreStream, sandbox);
}
public static void parseXMLIntoSandbox(String restore, UserSandbox sandbox)
throws InvalidStructureException, UnfullfilledRequirementsException, XmlPullParserException, IOException {
InputStream stream = new ByteArrayInputStream(restore.getBytes(StandardCharsets.UTF_8));
// ... rest of the code ...
|
89914c3f2a4309913071bdf6a1252bb7c754df94
|
src/net/hillsdon/fij/core/TestIterableUtils.java
|
src/net/hillsdon/fij/core/TestIterableUtils.java
|
package net.hillsdon.fij.core;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static net.hillsdon.fij.core.Functional.list;
import static net.hillsdon.fij.core.IterableUtils.reversed;
import java.util.List;
import junit.framework.TestCase;
public class TestIterableUtils extends TestCase {
public void testReversed() {
List<Integer> data = asList(1, 2, 3);
assertEquals(asList(3, 2, 1), list(reversed(data)));
assertEquals(emptyList(), list(reversed(emptyList())));
}
}
|
package net.hillsdon.fij.core;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static net.hillsdon.fij.core.Functional.list;
import static net.hillsdon.fij.core.IterableUtils.reversed;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import junit.framework.TestCase;
public class TestIterableUtils extends TestCase {
public void testReversed() {
List<Integer> data = new ArrayList<Integer>(asList(1, 2, 3));
assertEquals(asList(3, 2, 1), list(reversed(data)));
assertEquals(emptyList(), list(reversed(emptyList())));
Iterator<Integer> iter = reversed(data).iterator();
iter.next();
iter.next();
iter.remove();
iter.next();
assertFalse(iter.hasNext());
assertEquals(asList(1, 3), data);
}
}
|
Test we delegate remove too.
|
Test we delegate remove too.
|
Java
|
apache-2.0
|
ashirley/reviki,strr/reviki,paulcadman/reviki,ashirley/reviki,paulcadman/reviki,CoreFiling/reviki,ashirley/reviki,CoreFiling/reviki,ashirley/reviki,strr/reviki,strr/reviki,CoreFiling/reviki,strr/reviki,strr/reviki,ashirley/reviki,CoreFiling/reviki,CoreFiling/reviki,paulcadman/reviki,paulcadman/reviki
|
java
|
## Code Before:
package net.hillsdon.fij.core;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static net.hillsdon.fij.core.Functional.list;
import static net.hillsdon.fij.core.IterableUtils.reversed;
import java.util.List;
import junit.framework.TestCase;
public class TestIterableUtils extends TestCase {
public void testReversed() {
List<Integer> data = asList(1, 2, 3);
assertEquals(asList(3, 2, 1), list(reversed(data)));
assertEquals(emptyList(), list(reversed(emptyList())));
}
}
## Instruction:
Test we delegate remove too.
## Code After:
package net.hillsdon.fij.core;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static net.hillsdon.fij.core.Functional.list;
import static net.hillsdon.fij.core.IterableUtils.reversed;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import junit.framework.TestCase;
public class TestIterableUtils extends TestCase {
public void testReversed() {
List<Integer> data = new ArrayList<Integer>(asList(1, 2, 3));
assertEquals(asList(3, 2, 1), list(reversed(data)));
assertEquals(emptyList(), list(reversed(emptyList())));
Iterator<Integer> iter = reversed(data).iterator();
iter.next();
iter.next();
iter.remove();
iter.next();
assertFalse(iter.hasNext());
assertEquals(asList(1, 3), data);
}
}
|
...
import static net.hillsdon.fij.core.Functional.list;
import static net.hillsdon.fij.core.IterableUtils.reversed;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import junit.framework.TestCase;
...
public class TestIterableUtils extends TestCase {
public void testReversed() {
List<Integer> data = new ArrayList<Integer>(asList(1, 2, 3));
assertEquals(asList(3, 2, 1), list(reversed(data)));
assertEquals(emptyList(), list(reversed(emptyList())));
Iterator<Integer> iter = reversed(data).iterator();
iter.next();
iter.next();
iter.remove();
iter.next();
assertFalse(iter.hasNext());
assertEquals(asList(1, 3), data);
}
}
...
|
2adf6a96653c17e255aa1225fc37f128d331eb2b
|
myapplication/src/main/java/com/example/myapplication/MainActivity.java
|
myapplication/src/main/java/com/example/myapplication/MainActivity.java
|
package com.example.myapplication;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// 2 b f
}
}
|
package com.example.myapplication;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//Add feat main 1
// 2 b f
}
}
|
Rebase add 1 You are currently rebasing branch 'master' on '924427b'.
|
Rebase add 1 You are currently rebasing branch 'master' on '924427b'.
|
Java
|
apache-2.0
|
Julia1632/testr
|
java
|
## Code Before:
package com.example.myapplication;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// 2 b f
}
}
## Instruction:
Rebase add 1 You are currently rebasing branch 'master' on '924427b'.
## Code After:
package com.example.myapplication;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//Add feat main 1
// 2 b f
}
}
|
# ... existing code ...
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//Add feat main 1
// 2 b f
# ... rest of the code ...
|
15546cf2407ab6aa9459de25d88961aa208516b6
|
src/org/intellij/lang/jflex/fileTypes/JFlexFileTypeFactory.java
|
src/org/intellij/lang/jflex/fileTypes/JFlexFileTypeFactory.java
|
package org.intellij.lang.jflex.fileTypes;
import com.intellij.openapi.fileTypes.FileTypeConsumer;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import org.jetbrains.annotations.NotNull;
/**
* JFlex file type factory, tells IDEA about new file type
*
* @author Jan Dolecek
*/
public final class JFlexFileTypeFactory extends FileTypeFactory {
@Override
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(JFlexFileType.FILE_TYPE, JFlexFileType.DEFAULT_EXTENSION);
}
}
|
package org.intellij.lang.jflex.fileTypes;
import com.intellij.openapi.fileTypes.FileTypeConsumer;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import org.jetbrains.annotations.NotNull;
/**
* JFlex file type factory, tells IDEA about new file type
*
* @author Jan Dolecek
*/
public final class JFlexFileTypeFactory extends FileTypeFactory {
@Override
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(JFlexFileType.FILE_TYPE, "flex;jflex");
}
}
|
Add .jflex as a supported file extension (in addition to .flex).
|
Add .jflex as a supported file extension (in addition to .flex).
|
Java
|
apache-2.0
|
aefimov/idea-jflex
|
java
|
## Code Before:
package org.intellij.lang.jflex.fileTypes;
import com.intellij.openapi.fileTypes.FileTypeConsumer;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import org.jetbrains.annotations.NotNull;
/**
* JFlex file type factory, tells IDEA about new file type
*
* @author Jan Dolecek
*/
public final class JFlexFileTypeFactory extends FileTypeFactory {
@Override
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(JFlexFileType.FILE_TYPE, JFlexFileType.DEFAULT_EXTENSION);
}
}
## Instruction:
Add .jflex as a supported file extension (in addition to .flex).
## Code After:
package org.intellij.lang.jflex.fileTypes;
import com.intellij.openapi.fileTypes.FileTypeConsumer;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import org.jetbrains.annotations.NotNull;
/**
* JFlex file type factory, tells IDEA about new file type
*
* @author Jan Dolecek
*/
public final class JFlexFileTypeFactory extends FileTypeFactory {
@Override
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(JFlexFileType.FILE_TYPE, "flex;jflex");
}
}
|
...
public final class JFlexFileTypeFactory extends FileTypeFactory {
@Override
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(JFlexFileType.FILE_TYPE, "flex;jflex");
}
}
...
|
594cd5d490786bbbdcf877d8c155530c36acd2c1
|
src/services/TemperatureMonitor/src/temperature.py
|
src/services/TemperatureMonitor/src/temperature.py
|
import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return result
|
import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result
|
Remove Smoothing From Temp Sensor
|
Remove Smoothing From Temp Sensor
|
Python
|
mit
|
IAPark/PITherm
|
python
|
## Code Before:
import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return result
## Instruction:
Remove Smoothing From Temp Sensor
## Code After:
import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result
|
...
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result
...
|
1bae1bc94fea8fd9b8fe25d832ebfce07ad08a1c
|
utilities/build/update_xform_translate_path.py
|
utilities/build/update_xform_translate_path.py
|
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
|
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
Fix up local.ini updater code to look specifically for 'xform_translate_path'
|
Fix up local.ini updater code to look specifically for 'xform_translate_path'
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest
|
python
|
## Code Before:
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
## Instruction:
Fix up local.ini updater code to look specifically for 'xform_translate_path'
## Code After:
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
// ... existing code ...
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
// ... modified code ...
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
// ... rest of the code ...
|
ad7f53cdb35644304350b077a9ac11e5ec588036
|
tests/include/alt-dummy/ecp_alt.h
|
tests/include/alt-dummy/ecp_alt.h
|
/* ecp_alt.h with dummy types for MBEDTLS_ECP_ALT */
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ECP_ALT_H
#define ECP_ALT_H
typedef struct mbedtls_ecp_group
{
int dummy;
}
mbedtls_ecp_group;
#if !defined(MBEDTLS_ECP_WINDOW_SIZE)
#define MBEDTLS_ECP_WINDOW_SIZE 6
#endif
#if !defined(MBEDTLS_ECP_FIXED_POINT_OPTIM)
#define MBEDTLS_ECP_FIXED_POINT_OPTIM 1
#endif
#endif /* ecp_alt.h */
|
/* ecp_alt.h with dummy types for MBEDTLS_ECP_ALT */
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ECP_ALT_H
#define ECP_ALT_H
typedef struct mbedtls_ecp_group
{
const mbedtls_ecp_group_id id;
const mbedtls_mpi P;
const mbedtls_mpi A;
const mbedtls_mpi B;
const mbedtls_ecp_point G;
const mbedtls_mpi N;
const size_t pbits;
const size_t nbits;
}
mbedtls_ecp_group;
#endif /* ecp_alt.h */
|
Define public fields of mbedtls_ecp_group in alt test header
|
Define public fields of mbedtls_ecp_group in alt test header
And don't define configuration macros that only apply to the built-in
implementation.
Signed-off-by: Gilles Peskine <[email protected]>
|
C
|
apache-2.0
|
Mbed-TLS/mbedtls,ARMmbed/mbedtls,NXPmicro/mbedtls,ARMmbed/mbedtls,ARMmbed/mbedtls,NXPmicro/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls,NXPmicro/mbedtls,Mbed-TLS/mbedtls,Mbed-TLS/mbedtls,NXPmicro/mbedtls
|
c
|
## Code Before:
/* ecp_alt.h with dummy types for MBEDTLS_ECP_ALT */
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ECP_ALT_H
#define ECP_ALT_H
typedef struct mbedtls_ecp_group
{
int dummy;
}
mbedtls_ecp_group;
#if !defined(MBEDTLS_ECP_WINDOW_SIZE)
#define MBEDTLS_ECP_WINDOW_SIZE 6
#endif
#if !defined(MBEDTLS_ECP_FIXED_POINT_OPTIM)
#define MBEDTLS_ECP_FIXED_POINT_OPTIM 1
#endif
#endif /* ecp_alt.h */
## Instruction:
Define public fields of mbedtls_ecp_group in alt test header
And don't define configuration macros that only apply to the built-in
implementation.
Signed-off-by: Gilles Peskine <[email protected]>
## Code After:
/* ecp_alt.h with dummy types for MBEDTLS_ECP_ALT */
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ECP_ALT_H
#define ECP_ALT_H
typedef struct mbedtls_ecp_group
{
const mbedtls_ecp_group_id id;
const mbedtls_mpi P;
const mbedtls_mpi A;
const mbedtls_mpi B;
const mbedtls_ecp_point G;
const mbedtls_mpi N;
const size_t pbits;
const size_t nbits;
}
mbedtls_ecp_group;
#endif /* ecp_alt.h */
|
...
typedef struct mbedtls_ecp_group
{
const mbedtls_ecp_group_id id;
const mbedtls_mpi P;
const mbedtls_mpi A;
const mbedtls_mpi B;
const mbedtls_ecp_point G;
const mbedtls_mpi N;
const size_t pbits;
const size_t nbits;
}
mbedtls_ecp_group;
#endif /* ecp_alt.h */
...
|
cd5e52c8e1d481c8e1bf1e7a71b0c421e53c93c9
|
featureflow/__init__.py
|
featureflow/__init__.py
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
Add EventLog stuff to package-level exports
|
Add EventLog stuff to package-level exports
|
Python
|
mit
|
JohnVinyard/featureflow,JohnVinyard/featureflow
|
python
|
## Code Before:
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
## Instruction:
Add EventLog stuff to package-level exports
## Code After:
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
// ... existing code ...
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
// ... rest of the code ...
|
4716ee58e29f8fffaae7e7dc6131784212a5d907
|
test/com/bjorn/GameTest.java
|
test/com/bjorn/GameTest.java
|
package com.bjorn;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import javax.xml.bind.annotation.XmlAttribute;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class GameTest {
UI mockUI;
Board mockBoard;
Game newGame;
@Before
public void setUp() {
mockUI = Mockito.mock(UI.class);
mockBoard = Mockito.mock(Board.class);
newGame = new Game(mockUI, mockBoard);
newGame.startGame();
}
@Test
public void checkIfWelcomeIsCalled() {
verify(mockUI, times(1)).printWelcome();
}
@Test
public void checkIfPrintBoardIsCalled() {
verify(mockUI, atLeastOnce()).printBoard(mockBoard);
}
@Test
public void checkIfPromptForXCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForXCoordinate();
}
@Test
public void checkIfPromptForYCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForYCoordinate();
}
@Test
public void checkIfGetUserInputIsCalled() {
verify(mockUI, atLeastOnce()).getUserInput();
}
@Test
public void checkIfUpdateBoardStateIsCalled() {
verify(mockBoard, atLeastOnce()).upDateBoardState(0, 0, "H");
}
}
|
package com.bjorn;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class GameTest {
UI mockUI;
Board mockBoard;
Game newGame;
@Before
public void setUp() {
mockUI = Mockito.mock(UI.class);
mockBoard = Mockito.mock(Board.class);
newGame = new Game(mockUI, mockBoard);
newGame.startGame();
}
@Test
public void checkIfWelcomeIsCalled() {
verify(mockUI, times(1)).printWelcome();
}
@Test
public void checkIfPrintBoardIsCalled() {
verify(mockUI, atLeastOnce()).printBoard(mockBoard);
}
@Test
public void checkIfPromptForXCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForXCoordinate();
}
@Test
public void checkIfPromptForYCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForYCoordinate();
}
@Test
public void checkIfGetUserInputIsCalled() {
verify(mockUI, atLeastOnce()).getUserInput();
}
@Test
public void checkIfUpdateBoardStateIsCalled() {
verify(mockBoard, atLeastOnce()).upDateBoardState(0, 0, "H");
}
}
|
Remove unused imports in game tests
|
Remove unused imports in game tests
|
Java
|
mit
|
Bjornkjohnson/AgileBattleShip
|
java
|
## Code Before:
package com.bjorn;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import javax.xml.bind.annotation.XmlAttribute;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class GameTest {
UI mockUI;
Board mockBoard;
Game newGame;
@Before
public void setUp() {
mockUI = Mockito.mock(UI.class);
mockBoard = Mockito.mock(Board.class);
newGame = new Game(mockUI, mockBoard);
newGame.startGame();
}
@Test
public void checkIfWelcomeIsCalled() {
verify(mockUI, times(1)).printWelcome();
}
@Test
public void checkIfPrintBoardIsCalled() {
verify(mockUI, atLeastOnce()).printBoard(mockBoard);
}
@Test
public void checkIfPromptForXCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForXCoordinate();
}
@Test
public void checkIfPromptForYCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForYCoordinate();
}
@Test
public void checkIfGetUserInputIsCalled() {
verify(mockUI, atLeastOnce()).getUserInput();
}
@Test
public void checkIfUpdateBoardStateIsCalled() {
verify(mockBoard, atLeastOnce()).upDateBoardState(0, 0, "H");
}
}
## Instruction:
Remove unused imports in game tests
## Code After:
package com.bjorn;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class GameTest {
UI mockUI;
Board mockBoard;
Game newGame;
@Before
public void setUp() {
mockUI = Mockito.mock(UI.class);
mockBoard = Mockito.mock(Board.class);
newGame = new Game(mockUI, mockBoard);
newGame.startGame();
}
@Test
public void checkIfWelcomeIsCalled() {
verify(mockUI, times(1)).printWelcome();
}
@Test
public void checkIfPrintBoardIsCalled() {
verify(mockUI, atLeastOnce()).printBoard(mockBoard);
}
@Test
public void checkIfPromptForXCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForXCoordinate();
}
@Test
public void checkIfPromptForYCoordinateIsCalled() {
verify(mockUI, atLeastOnce()).promptForYCoordinate();
}
@Test
public void checkIfGetUserInputIsCalled() {
verify(mockUI, atLeastOnce()).getUserInput();
}
@Test
public void checkIfUpdateBoardStateIsCalled() {
verify(mockBoard, atLeastOnce()).upDateBoardState(0, 0, "H");
}
}
|
...
import org.junit.Test;
import org.mockito.Mockito;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
...
|
c78d9042dd3dc8131568770db961479e9c507fc2
|
build.gradle.kts
|
build.gradle.kts
|
buildscript {
repositories {
google()
jcenter()
}
}
plugins {
id("com.github.ben-manes.versions") version "0.22.0"
}
tasks.wrapper {
distributionType = Wrapper.DistributionType.ALL
}
tasks.register<Delete>("clean") {
delete("build")
}
tasks.register("ciBuild") {
val isMaster = System.getenv("CIRCLE_BRANCH") == "master"
val isPr = System.getenv("CIRCLE_PULL_REQUEST") != null
if (isMaster && !isPr) { // Release build
dependsOn(":plugin:build", ":plugin:publish")
} else {
dependsOn(":plugin:check")
}
}
allprojects {
repositories {
google()
jcenter()
}
}
|
buildscript {
repositories {
google()
jcenter()
}
}
plugins {
id("com.github.ben-manes.versions") version "0.22.0"
}
tasks.wrapper {
distributionType = Wrapper.DistributionType.ALL
}
tasks.register<Delete>("clean") {
delete("build")
}
tasks.register("ciBuild") {
val isMaster = System.getenv("CIRCLE_BRANCH") == "master"
val isPr = System.getenv("CIRCLE_PULL_REQUEST") != null
val isSnapshot = project("plugin").version.toString().contains("snapshot", true)
if (isMaster && !isPr) { // Release build
if (isSnapshot) {
dependsOn(":plugin:build", ":plugin:publish")
} else {
dependsOn(":plugin:build")
}
} else {
dependsOn(":plugin:check")
}
}
allprojects {
repositories {
google()
jcenter()
}
}
|
Fix bug causing non-snapshot builds to fail
|
Fix bug causing non-snapshot builds to fail
Signed-off-by: Alex Saveau <[email protected]>
|
Kotlin
|
mit
|
MaTriXy/gradle-play-publisher-1,MaTriXy/gradle-play-publisher-1,MaTriXy/gradle-play-publisher-1,Triple-T/gradle-play-publisher,Triple-T/gradle-play-publisher,Triple-T/gradle-play-publisher
|
kotlin
|
## Code Before:
buildscript {
repositories {
google()
jcenter()
}
}
plugins {
id("com.github.ben-manes.versions") version "0.22.0"
}
tasks.wrapper {
distributionType = Wrapper.DistributionType.ALL
}
tasks.register<Delete>("clean") {
delete("build")
}
tasks.register("ciBuild") {
val isMaster = System.getenv("CIRCLE_BRANCH") == "master"
val isPr = System.getenv("CIRCLE_PULL_REQUEST") != null
if (isMaster && !isPr) { // Release build
dependsOn(":plugin:build", ":plugin:publish")
} else {
dependsOn(":plugin:check")
}
}
allprojects {
repositories {
google()
jcenter()
}
}
## Instruction:
Fix bug causing non-snapshot builds to fail
Signed-off-by: Alex Saveau <[email protected]>
## Code After:
buildscript {
repositories {
google()
jcenter()
}
}
plugins {
id("com.github.ben-manes.versions") version "0.22.0"
}
tasks.wrapper {
distributionType = Wrapper.DistributionType.ALL
}
tasks.register<Delete>("clean") {
delete("build")
}
tasks.register("ciBuild") {
val isMaster = System.getenv("CIRCLE_BRANCH") == "master"
val isPr = System.getenv("CIRCLE_PULL_REQUEST") != null
val isSnapshot = project("plugin").version.toString().contains("snapshot", true)
if (isMaster && !isPr) { // Release build
if (isSnapshot) {
dependsOn(":plugin:build", ":plugin:publish")
} else {
dependsOn(":plugin:build")
}
} else {
dependsOn(":plugin:check")
}
}
allprojects {
repositories {
google()
jcenter()
}
}
|
...
tasks.register("ciBuild") {
val isMaster = System.getenv("CIRCLE_BRANCH") == "master"
val isPr = System.getenv("CIRCLE_PULL_REQUEST") != null
val isSnapshot = project("plugin").version.toString().contains("snapshot", true)
if (isMaster && !isPr) { // Release build
if (isSnapshot) {
dependsOn(":plugin:build", ":plugin:publish")
} else {
dependsOn(":plugin:build")
}
} else {
dependsOn(":plugin:check")
}
...
|
f869cf9a94749ea210d38178317d196fbdd15fac
|
resolwe/flow/tests/test_backend.py
|
resolwe/flow/tests/test_backend.py
|
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', '[email protected]', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH'])
os.makedirs(settings.FLOW['BACKEND']['DATA_PATH'])
def test_manager(self):
manager.communicate()
|
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', '[email protected]', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
data_path = settings.FLOW['BACKEND']['DATA_PATH']
if os.path.exists(data_path):
shutil.rmtree(data_path)
os.makedirs(data_path)
def test_manager(self):
manager.communicate()
|
Fix error if no data path
|
Fix error if no data path
|
Python
|
apache-2.0
|
jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe
|
python
|
## Code Before:
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', '[email protected]', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH'])
os.makedirs(settings.FLOW['BACKEND']['DATA_PATH'])
def test_manager(self):
manager.communicate()
## Instruction:
Fix error if no data path
## Code After:
import os
import shutil
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from resolwe.flow.engine import manager
from resolwe.flow.models import Data, Tool
class ManagerTest(TestCase):
def setUp(self):
u = get_user_model().objects.create_superuser('test', '[email protected]', 'test')
t = Tool(slug='test-processor',
name='Test Processor',
contributor=u,
type='data:test',
version=1)
t.save()
d = Data(slug='test-data',
name='Test Data',
contributor=u,
tool=t)
d.save()
data_path = settings.FLOW['BACKEND']['DATA_PATH']
if os.path.exists(data_path):
shutil.rmtree(data_path)
os.makedirs(data_path)
def test_manager(self):
manager.communicate()
|
// ... existing code ...
tool=t)
d.save()
data_path = settings.FLOW['BACKEND']['DATA_PATH']
if os.path.exists(data_path):
shutil.rmtree(data_path)
os.makedirs(data_path)
def test_manager(self):
manager.communicate()
// ... rest of the code ...
|
8f6a19bade1a0591f3feba4521fdf42c157c179d
|
skyline_path/algorithms/growing_graph.py
|
skyline_path/algorithms/growing_graph.py
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
Add all_nodes and growing times param
|
Add all_nodes and growing times param
|
Python
|
mit
|
shadow3x3x3/renew-skyline-path-query
|
python
|
## Code Before:
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'
## Instruction:
Add all_nodes and growing times param
## Code After:
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
# ... existing code ...
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
# ... modified code ...
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
# ... rest of the code ...
|
574a2d98733cbab814050d29d1a24cd5c6563c4f
|
Tools/scripts/fixps.py
|
Tools/scripts/fixps.py
|
import sys
import regex
import regsub
def main():
for file in sys.argv[1:]:
try:
f = open(file, 'r+')
except IOError:
print file, ': can\'t open for update'
continue
line = f.readline()
if regex.match('^#! */usr/local/bin/python', line) < 0:
print file, ': not a /usr/local/bin/python script'
f.close()
continue
rest = f.read()
line = regsub.sub('/usr/local/bin/python',
'/usr/bin/env python', line)
print file, ':', `line`
f.seek(0)
f.write(line)
f.write(rest)
f.close()
main()
|
import sys
import re
def main():
for file in sys.argv[1:]:
try:
f = open(file, 'r')
except IOError, msg:
print file, ': can\'t open :', msg
continue
line = f.readline()
if not re.match('^#! */usr/local/bin/python', line):
print file, ': not a /usr/local/bin/python script'
f.close()
continue
rest = f.read()
f.close()
line = re.sub('/usr/local/bin/python',
'/usr/bin/env python', line)
print file, ':', `line`
f = open(file, "w")
f.write(line)
f.write(rest)
f.close()
main()
|
Use re instead of regex. Don't rewrite the file in place. (Reported by Andy Dustman.)
|
Use re instead of regex.
Don't rewrite the file in place.
(Reported by Andy Dustman.)
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
python
|
## Code Before:
import sys
import regex
import regsub
def main():
for file in sys.argv[1:]:
try:
f = open(file, 'r+')
except IOError:
print file, ': can\'t open for update'
continue
line = f.readline()
if regex.match('^#! */usr/local/bin/python', line) < 0:
print file, ': not a /usr/local/bin/python script'
f.close()
continue
rest = f.read()
line = regsub.sub('/usr/local/bin/python',
'/usr/bin/env python', line)
print file, ':', `line`
f.seek(0)
f.write(line)
f.write(rest)
f.close()
main()
## Instruction:
Use re instead of regex.
Don't rewrite the file in place.
(Reported by Andy Dustman.)
## Code After:
import sys
import re
def main():
for file in sys.argv[1:]:
try:
f = open(file, 'r')
except IOError, msg:
print file, ': can\'t open :', msg
continue
line = f.readline()
if not re.match('^#! */usr/local/bin/python', line):
print file, ': not a /usr/local/bin/python script'
f.close()
continue
rest = f.read()
f.close()
line = re.sub('/usr/local/bin/python',
'/usr/bin/env python', line)
print file, ':', `line`
f = open(file, "w")
f.write(line)
f.write(rest)
f.close()
main()
|
// ... existing code ...
import sys
import re
def main():
for file in sys.argv[1:]:
try:
f = open(file, 'r')
except IOError, msg:
print file, ': can\'t open :', msg
continue
line = f.readline()
if not re.match('^#! */usr/local/bin/python', line):
print file, ': not a /usr/local/bin/python script'
f.close()
continue
rest = f.read()
f.close()
line = re.sub('/usr/local/bin/python',
'/usr/bin/env python', line)
print file, ':', `line`
f = open(file, "w")
f.write(line)
f.write(rest)
f.close()
// ... rest of the code ...
|
a117b191c402ce051b6e8aec2fced315c119b9eb
|
test/test_large_source_tree.py
|
test/test_large_source_tree.py
|
import unittest
from yeast_harness import *
class TestLargeSourceTree(unittest.TestCase):
def test_large_source_tree(self):
make_filename = lambda ext='': ''.join(
random.choice(string.ascii_lowercase) for _ in range(8)) + ext
make_sources = lambda path: [
CSourceFile(path + '/' + make_filename('.c')) for _ in range(10)]
make_spore = lambda: SporeFile(
sources=make_sources(make_filename()),
products='static_lib',
name=make_filename('.spore'))
mk = Makefile(
spores=[make_spore() for _ in range(10)], name='Makefile')
with SourceTree('tree') as src:
src.create(mk)
build = Build(src, mk)
self.assertEqual(0, build.make())
|
import unittest
from yeast_harness import *
class TestLargeSourceTree(unittest.TestCase):
def test_large_source_tree(self):
make_filename = lambda ext='': ''.join(
random.choice(string.ascii_lowercase) for _ in range(8)) + ext
make_sources = lambda path: [
CSourceFile(path + '/' + make_filename('.c')) for _ in range(100)]
make_spore = lambda: SporeFile(
sources=make_sources(make_filename()),
products='static_lib',
name=make_filename('.spore'))
mk = Makefile(
spores=[make_spore() for _ in range(10)], name='Makefile')
with SourceTree('tree', preserve=True) as src:
src.create(mk)
build = Build(src, mk)
self.assertEqual(0, build.make('-j4'))
|
Increase size of large source tree 10x
|
Increase size of large source tree 10x
- up to 1000 source files
- use parallel make
- preserve source tree
|
Python
|
mit
|
sjanhunen/moss,sjanhunen/yeast,sjanhunen/moss,sjanhunen/gnumake-molds
|
python
|
## Code Before:
import unittest
from yeast_harness import *
class TestLargeSourceTree(unittest.TestCase):
def test_large_source_tree(self):
make_filename = lambda ext='': ''.join(
random.choice(string.ascii_lowercase) for _ in range(8)) + ext
make_sources = lambda path: [
CSourceFile(path + '/' + make_filename('.c')) for _ in range(10)]
make_spore = lambda: SporeFile(
sources=make_sources(make_filename()),
products='static_lib',
name=make_filename('.spore'))
mk = Makefile(
spores=[make_spore() for _ in range(10)], name='Makefile')
with SourceTree('tree') as src:
src.create(mk)
build = Build(src, mk)
self.assertEqual(0, build.make())
## Instruction:
Increase size of large source tree 10x
- up to 1000 source files
- use parallel make
- preserve source tree
## Code After:
import unittest
from yeast_harness import *
class TestLargeSourceTree(unittest.TestCase):
def test_large_source_tree(self):
make_filename = lambda ext='': ''.join(
random.choice(string.ascii_lowercase) for _ in range(8)) + ext
make_sources = lambda path: [
CSourceFile(path + '/' + make_filename('.c')) for _ in range(100)]
make_spore = lambda: SporeFile(
sources=make_sources(make_filename()),
products='static_lib',
name=make_filename('.spore'))
mk = Makefile(
spores=[make_spore() for _ in range(10)], name='Makefile')
with SourceTree('tree', preserve=True) as src:
src.create(mk)
build = Build(src, mk)
self.assertEqual(0, build.make('-j4'))
|
// ... existing code ...
random.choice(string.ascii_lowercase) for _ in range(8)) + ext
make_sources = lambda path: [
CSourceFile(path + '/' + make_filename('.c')) for _ in range(100)]
make_spore = lambda: SporeFile(
sources=make_sources(make_filename()),
products='static_lib',
// ... modified code ...
mk = Makefile(
spores=[make_spore() for _ in range(10)], name='Makefile')
with SourceTree('tree', preserve=True) as src:
src.create(mk)
build = Build(src, mk)
self.assertEqual(0, build.make('-j4'))
// ... rest of the code ...
|
b65acff0d36bd7b26d73399658cf9bf6a357c41b
|
app/src/main/java/net/squanchy/notification/UpcomingEventsService.kt
|
app/src/main/java/net/squanchy/notification/UpcomingEventsService.kt
|
package net.squanchy.notification
import io.reactivex.Single
import net.squanchy.schedule.domain.view.Event
import net.squanchy.support.system.CurrentTime
import org.threeten.bp.Duration
import org.threeten.bp.ZonedDateTime
class UpcomingEventsService(
private val service: NotificationService,
private val currentTime: CurrentTime,
private val notificationInterval: Duration
) {
fun upcomingEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(now) } }
.map { events -> events.filter { isBeforeOrEqualTo(it.zonedStartTime, notificationIntervalEnd) } }
}
private fun isBeforeOrEqualTo(start: ZonedDateTime, notificationIntervalEnd: ZonedDateTime): Boolean {
return start.isBefore(notificationIntervalEnd) || start.isEqual(notificationIntervalEnd)
}
fun nextEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(notificationIntervalEnd) } }
}
}
|
package net.squanchy.notification
import io.reactivex.Single
import net.squanchy.schedule.domain.view.Event
import net.squanchy.support.system.CurrentTime
import org.threeten.bp.Duration
import org.threeten.bp.ZonedDateTime
class UpcomingEventsService(
private val service: NotificationService,
private val currentTime: CurrentTime,
private val notificationInterval: Duration
) {
fun upcomingEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(now) } }
.map { events -> events.filter { it.zonedStartTime.isBeforeOrEqualTo(notificationIntervalEnd) } }
}
private fun ZonedDateTime.isBeforeOrEqualTo(other: ZonedDateTime) =
isBefore(other) || isEqual(other)
fun nextEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(notificationIntervalEnd) } }
}
}
|
Transform method to extension function
|
Transform method to extension function
|
Kotlin
|
apache-2.0
|
squanchy-dev/squanchy-android,squanchy-dev/squanchy-android,squanchy-dev/squanchy-android
|
kotlin
|
## Code Before:
package net.squanchy.notification
import io.reactivex.Single
import net.squanchy.schedule.domain.view.Event
import net.squanchy.support.system.CurrentTime
import org.threeten.bp.Duration
import org.threeten.bp.ZonedDateTime
class UpcomingEventsService(
private val service: NotificationService,
private val currentTime: CurrentTime,
private val notificationInterval: Duration
) {
fun upcomingEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(now) } }
.map { events -> events.filter { isBeforeOrEqualTo(it.zonedStartTime, notificationIntervalEnd) } }
}
private fun isBeforeOrEqualTo(start: ZonedDateTime, notificationIntervalEnd: ZonedDateTime): Boolean {
return start.isBefore(notificationIntervalEnd) || start.isEqual(notificationIntervalEnd)
}
fun nextEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(notificationIntervalEnd) } }
}
}
## Instruction:
Transform method to extension function
## Code After:
package net.squanchy.notification
import io.reactivex.Single
import net.squanchy.schedule.domain.view.Event
import net.squanchy.support.system.CurrentTime
import org.threeten.bp.Duration
import org.threeten.bp.ZonedDateTime
class UpcomingEventsService(
private val service: NotificationService,
private val currentTime: CurrentTime,
private val notificationInterval: Duration
) {
fun upcomingEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(now) } }
.map { events -> events.filter { it.zonedStartTime.isBeforeOrEqualTo(notificationIntervalEnd) } }
}
private fun ZonedDateTime.isBeforeOrEqualTo(other: ZonedDateTime) =
isBefore(other) || isEqual(other)
fun nextEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
val notificationIntervalEnd = now.plus(notificationInterval)
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(notificationIntervalEnd) } }
}
}
|
# ... existing code ...
return service.sortedFavourites()
.map { events -> events.filter { it.zonedStartTime.isAfter(now) } }
.map { events -> events.filter { it.zonedStartTime.isBeforeOrEqualTo(notificationIntervalEnd) } }
}
private fun ZonedDateTime.isBeforeOrEqualTo(other: ZonedDateTime) =
isBefore(other) || isEqual(other)
fun nextEvents(): Single<List<Event>> {
val now = currentTime.currentDateTime()
# ... rest of the code ...
|
6eae2e0865b070cc481b1b6cde3d7d9e467e8f78
|
tests/src/test/java/clarifai2/test/ConvenienceMethodTests.java
|
tests/src/test/java/clarifai2/test/ConvenienceMethodTests.java
|
package clarifai2.test;
import clarifai2.api.ClarifaiUtil;
import clarifai2.api.request.ClarifaiRequest;
import clarifai2.dto.input.ClarifaiInput;
import clarifai2.dto.input.image.ClarifaiImage;
import clarifai2.dto.model.Model;
import clarifai2.dto.model.output_info.ConceptOutputInfo;
import clarifai2.dto.prediction.Concept;
import org.junit.Test;
public class ConvenienceMethodTests extends BaseClarifaiAPITest {
@Test public void testAwaitTraining() {
for (int i = 0; i < 1; i++) {
assertSuccess(client.deleteAllModels());
assertSuccess(client.createModel("mod1")
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("train")
))
);
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE))
.withConcepts(
Concept.forID("train")
)
)
);
final ClarifaiRequest<Model<?>> request = ClarifaiUtil.trainAndAwaitCompletion(client, "mod1");
if (i == 0) {
assertSuccessAsync(request);
} else {
assertSuccess(request);
}
}
}
}
|
package clarifai2.test;
import clarifai2.api.ClarifaiUtil;
import clarifai2.api.request.ClarifaiRequest;
import clarifai2.dto.input.ClarifaiInput;
import clarifai2.dto.input.image.ClarifaiImage;
import clarifai2.dto.model.Model;
import clarifai2.dto.model.output_info.ConceptOutputInfo;
import clarifai2.dto.prediction.Concept;
import org.junit.Test;
public class ConvenienceMethodTests extends BaseClarifaiAPITest {
@Test public void testAwaitTraining() {
for (int i = 0; i < 1; i++) {
assertSuccess(client.deleteAllModels());
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE))
.withConcepts(
Concept.forID("train")
)
)
);
assertSuccess(client.createModel("mod1")
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("train")
))
);
final ClarifaiRequest<Model<?>> request = ClarifaiUtil.trainAndAwaitCompletion(client, "mod1");
if (i == 0) {
assertSuccessAsync(request);
} else {
assertSuccess(request);
}
}
}
}
|
Move input-add before model-create in test
|
Move input-add before model-create in test
|
Java
|
mit
|
Clarifai/clarifai-api-java
|
java
|
## Code Before:
package clarifai2.test;
import clarifai2.api.ClarifaiUtil;
import clarifai2.api.request.ClarifaiRequest;
import clarifai2.dto.input.ClarifaiInput;
import clarifai2.dto.input.image.ClarifaiImage;
import clarifai2.dto.model.Model;
import clarifai2.dto.model.output_info.ConceptOutputInfo;
import clarifai2.dto.prediction.Concept;
import org.junit.Test;
public class ConvenienceMethodTests extends BaseClarifaiAPITest {
@Test public void testAwaitTraining() {
for (int i = 0; i < 1; i++) {
assertSuccess(client.deleteAllModels());
assertSuccess(client.createModel("mod1")
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("train")
))
);
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE))
.withConcepts(
Concept.forID("train")
)
)
);
final ClarifaiRequest<Model<?>> request = ClarifaiUtil.trainAndAwaitCompletion(client, "mod1");
if (i == 0) {
assertSuccessAsync(request);
} else {
assertSuccess(request);
}
}
}
}
## Instruction:
Move input-add before model-create in test
## Code After:
package clarifai2.test;
import clarifai2.api.ClarifaiUtil;
import clarifai2.api.request.ClarifaiRequest;
import clarifai2.dto.input.ClarifaiInput;
import clarifai2.dto.input.image.ClarifaiImage;
import clarifai2.dto.model.Model;
import clarifai2.dto.model.output_info.ConceptOutputInfo;
import clarifai2.dto.prediction.Concept;
import org.junit.Test;
public class ConvenienceMethodTests extends BaseClarifaiAPITest {
@Test public void testAwaitTraining() {
for (int i = 0; i < 1; i++) {
assertSuccess(client.deleteAllModels());
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE))
.withConcepts(
Concept.forID("train")
)
)
);
assertSuccess(client.createModel("mod1")
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("train")
))
);
final ClarifaiRequest<Model<?>> request = ClarifaiUtil.trainAndAwaitCompletion(client, "mod1");
if (i == 0) {
assertSuccessAsync(request);
} else {
assertSuccess(request);
}
}
}
}
|
# ... existing code ...
for (int i = 0; i < 1; i++) {
assertSuccess(client.deleteAllModels());
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE))
# ... modified code ...
Concept.forID("train")
)
)
);
assertSuccess(client.createModel("mod1")
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("train")
))
);
final ClarifaiRequest<Model<?>> request = ClarifaiUtil.trainAndAwaitCompletion(client, "mod1");
# ... rest of the code ...
|
fa7d8c7171ec4f5cc9c4cc0073d17fec41f840e7
|
demo/java/src/main/java/tripleplay/demo/TripleDemoJava.java
|
demo/java/src/main/java/tripleplay/demo/TripleDemoJava.java
|
//
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2013, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.demo;
import playn.core.PlayN;
import playn.java.JavaPlatform;
import tripleplay.platform.JavaTPPlatform;
public class TripleDemoJava
{
public static void main (String[] args) {
JavaPlatform.Config config = new JavaPlatform.Config();
JavaPlatform platform = JavaPlatform.register(config);
TripleDemo.mainArgs = args;
// TODO: upgrade to include other systems
if (System.getProperty("os.name").contains("Linux")) {
JavaTPPlatform.register(platform, config);
}
PlayN.run(new TripleDemo());
}
}
|
//
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2013, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.demo;
import java.util.List;
import com.google.common.collect.Lists;
import playn.core.PlayN;
import playn.java.JavaPlatform;
import tripleplay.platform.JavaTPPlatform;
public class TripleDemoJava
{
public static void main (String[] args) {
JavaPlatform.Config config = new JavaPlatform.Config();
List<String> mainArgs = Lists.newArrayList();
for (int ii = 0; ii < args.length; ii++) {
String size = "--size=";
if (args[ii].startsWith(size)) {
String[] wh = args[ii].substring(size.length()).split("x");
config.width = Integer.parseInt(wh[0]);
config.height = Integer.parseInt(wh[1]);
continue;
}
mainArgs.add(args[ii]);
}
JavaPlatform platform = JavaPlatform.register(config);
TripleDemo.mainArgs = mainArgs.toArray(new String[0]);
JavaTPPlatform.register(platform, config);
PlayN.run(new TripleDemo());
}
}
|
Allow window size override on command line.
|
Allow window size override on command line.
|
Java
|
bsd-3-clause
|
joansmith/tripleplay,tomfisher/tripleplay,joansmith/tripleplay,joansmith/tripleplay,joansmith/tripleplay,tomfisher/tripleplay,tomfisher/tripleplay,tomfisher/tripleplay,joansmith/tripleplay,tomfisher/tripleplay
|
java
|
## Code Before:
//
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2013, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.demo;
import playn.core.PlayN;
import playn.java.JavaPlatform;
import tripleplay.platform.JavaTPPlatform;
public class TripleDemoJava
{
public static void main (String[] args) {
JavaPlatform.Config config = new JavaPlatform.Config();
JavaPlatform platform = JavaPlatform.register(config);
TripleDemo.mainArgs = args;
// TODO: upgrade to include other systems
if (System.getProperty("os.name").contains("Linux")) {
JavaTPPlatform.register(platform, config);
}
PlayN.run(new TripleDemo());
}
}
## Instruction:
Allow window size override on command line.
## Code After:
//
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2013, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.demo;
import java.util.List;
import com.google.common.collect.Lists;
import playn.core.PlayN;
import playn.java.JavaPlatform;
import tripleplay.platform.JavaTPPlatform;
public class TripleDemoJava
{
public static void main (String[] args) {
JavaPlatform.Config config = new JavaPlatform.Config();
List<String> mainArgs = Lists.newArrayList();
for (int ii = 0; ii < args.length; ii++) {
String size = "--size=";
if (args[ii].startsWith(size)) {
String[] wh = args[ii].substring(size.length()).split("x");
config.width = Integer.parseInt(wh[0]);
config.height = Integer.parseInt(wh[1]);
continue;
}
mainArgs.add(args[ii]);
}
JavaPlatform platform = JavaPlatform.register(config);
TripleDemo.mainArgs = mainArgs.toArray(new String[0]);
JavaTPPlatform.register(platform, config);
PlayN.run(new TripleDemo());
}
}
|
// ... existing code ...
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.demo;
import java.util.List;
import com.google.common.collect.Lists;
import playn.core.PlayN;
import playn.java.JavaPlatform;
// ... modified code ...
{
public static void main (String[] args) {
JavaPlatform.Config config = new JavaPlatform.Config();
List<String> mainArgs = Lists.newArrayList();
for (int ii = 0; ii < args.length; ii++) {
String size = "--size=";
if (args[ii].startsWith(size)) {
String[] wh = args[ii].substring(size.length()).split("x");
config.width = Integer.parseInt(wh[0]);
config.height = Integer.parseInt(wh[1]);
continue;
}
mainArgs.add(args[ii]);
}
JavaPlatform platform = JavaPlatform.register(config);
TripleDemo.mainArgs = mainArgs.toArray(new String[0]);
JavaTPPlatform.register(platform, config);
PlayN.run(new TripleDemo());
}
// ... rest of the code ...
|
3cacced39d9cb8bd5d6a2b3db8aa4b5aa1b37f58
|
jaraco/util/meta.py
|
jaraco/util/meta.py
|
from __future__ import unicode_literals
class LeafClassesMeta(type):
"""
A metaclass for classes that keeps track of all of them that
aren't base classes.
"""
_leaf_classes = set()
def __init__(cls, name, bases, attrs):
if not hasattr(cls, '_leaf_classes'):
cls._leaf_classes = set()
leaf_classes = getattr(cls, '_leaf_classes')
leaf_classes.add(cls)
# remove any base classes
leaf_classes -= set(bases)
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, 'tag'.
"""
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
attr = getattr(cls, 'tag', None)
if attr:
cls._registry[attr] = cls
|
from __future__ import unicode_literals
class LeafClassesMeta(type):
"""
A metaclass for classes that keeps track of all of them that
aren't base classes.
"""
_leaf_classes = set()
def __init__(cls, name, bases, attrs):
if not hasattr(cls, '_leaf_classes'):
cls._leaf_classes = set()
leaf_classes = getattr(cls, '_leaf_classes')
leaf_classes.add(cls)
# remove any base classes
leaf_classes -= set(bases)
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, indicated by attr_name.
"""
attr_name = 'tag'
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
meta = cls.__class__
attr = getattr(cls, meta.attr_name, None)
if attr:
cls._registry[attr] = cls
|
Allow attribute to be customized in TagRegistered
|
Allow attribute to be customized in TagRegistered
|
Python
|
mit
|
jaraco/jaraco.classes
|
python
|
## Code Before:
from __future__ import unicode_literals
class LeafClassesMeta(type):
"""
A metaclass for classes that keeps track of all of them that
aren't base classes.
"""
_leaf_classes = set()
def __init__(cls, name, bases, attrs):
if not hasattr(cls, '_leaf_classes'):
cls._leaf_classes = set()
leaf_classes = getattr(cls, '_leaf_classes')
leaf_classes.add(cls)
# remove any base classes
leaf_classes -= set(bases)
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, 'tag'.
"""
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
attr = getattr(cls, 'tag', None)
if attr:
cls._registry[attr] = cls
## Instruction:
Allow attribute to be customized in TagRegistered
## Code After:
from __future__ import unicode_literals
class LeafClassesMeta(type):
"""
A metaclass for classes that keeps track of all of them that
aren't base classes.
"""
_leaf_classes = set()
def __init__(cls, name, bases, attrs):
if not hasattr(cls, '_leaf_classes'):
cls._leaf_classes = set()
leaf_classes = getattr(cls, '_leaf_classes')
leaf_classes.add(cls)
# remove any base classes
leaf_classes -= set(bases)
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, indicated by attr_name.
"""
attr_name = 'tag'
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
meta = cls.__class__
attr = getattr(cls, meta.attr_name, None)
if attr:
cls._registry[attr] = cls
|
// ... existing code ...
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, indicated by attr_name.
"""
attr_name = 'tag'
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
meta = cls.__class__
attr = getattr(cls, meta.attr_name, None)
if attr:
cls._registry[attr] = cls
// ... rest of the code ...
|
6282aa2617bcc9bb8f293ea620eff23d2009334b
|
example/test.py
|
example/test.py
|
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
|
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
|
Use axpy on sample Python script
|
Use axpy on sample Python script
|
Python
|
bsd-2-clause
|
termoshtt/rust-numpy,termoshtt/rust-numpy
|
python
|
## Code Before:
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
## Instruction:
Use axpy on sample Python script
## Code After:
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
|
// ... existing code ...
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
// ... rest of the code ...
|
cde822bc87efa47cc3fae6fbb9462ae6a362afbc
|
fedmsg.d/endpoints.py
|
fedmsg.d/endpoints.py
|
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
|
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
Add debian endpoint as comment to file.
|
Add debian endpoint as comment to file.
|
Python
|
lgpl-2.1
|
fedora-infra/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,cicku/fedmsg,pombredanne/fedmsg,chaiku/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,mathstuf/fedmsg,vivekanand1101/fedmsg,chaiku/fedmsg,fedora-infra/fedmsg,pombredanne/fedmsg,mathstuf/fedmsg,maxamillion/fedmsg,maxamillion/fedmsg,mathstuf/fedmsg,chaiku/fedmsg,pombredanne/fedmsg,fedora-infra/fedmsg,maxamillion/fedmsg
|
python
|
## Code Before:
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
## Instruction:
Add debian endpoint as comment to file.
## Code After:
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
# ... existing code ...
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
# ... rest of the code ...
|
f981802947fd2c15be04489f6805395971807c9d
|
PVGeo/__main__.py
|
PVGeo/__main__.py
|
__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
Add catch for Travis CI testing.
|
Add catch for Travis CI testing.
|
Python
|
bsd-3-clause
|
banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics
|
python
|
## Code Before:
__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
## Instruction:
Add catch for Travis CI testing.
## Code After:
__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
...
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
...
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
...
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.