commit (stringlengths 40-40) | old_file (stringlengths 4-234) | new_file (stringlengths 4-234) | old_contents (stringlengths 10-3.01k) | new_contents (stringlengths 19-3.38k) | subject (stringlengths 16-736) | message (stringlengths 17-2.63k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5-82.6k) | config (stringclasses, 4 values) | content (stringlengths 134-4.41k) | fuzzy_diff (stringlengths 29-3.44k)
---|---|---|---|---|---|---|---|---|---|---|---|---
689eff4429e695db44297b734dad5e24bf1e1651
|
src/test/java/com/coravy/couch4j/PerfTest.java
|
src/test/java/com/coravy/couch4j/PerfTest.java
|
package com.coravy.couch4j;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Run on the same machine
*
* <p>
* Check established connections:
* <pre>
* sudo netstat -tap tcp
* </pre>
*/
public class PerfTest {
@Test(timeout=3500)
public void fetchMultipleDocuments() throws Exception {
Database<Document> test = CouchDB.localServerInstance().getDatabase("couch4j");
final int UPPER = 1000;
long start = System.currentTimeMillis();
for (int i = 0; i <= UPPER; i++) {
Document d = test.fetchDocument(Couch4jTest.VALID_DOC_ID);
assertNotNull(d.getRev());
assertEquals(Couch4jTest.VALID_DOC_ID, d.getId());
}
long duration = System.currentTimeMillis() - start;
System.out.format("Fetching %d documents took %d ms", UPPER, duration);
}
}
|
package com.coravy.couch4j;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Run on the same machine
*
* <p>
* Check established connections:
* <pre>
* sudo netstat -tap tcp
* </pre>
*/
public class PerfTest {
@Test(timeout=7000) // ~ 2700 ms on a 3.06 Core 2 Duo
public void fetchMultipleDocuments() throws Exception {
Database<Document> test = CouchDB.localServerInstance().getDatabase("couch4j");
final int UPPER = 1000;
long start = System.currentTimeMillis();
for (int i = 0; i <= UPPER; i++) {
Document d = test.fetchDocument(Couch4jTest.VALID_DOC_ID);
assertNotNull(d.getRev());
assertEquals(Couch4jTest.VALID_DOC_ID, d.getId());
}
long duration = System.currentTimeMillis() - start;
System.out.format("Fetching %d documents took %d ms", UPPER, duration);
}
}
|
Increase perf test timeout for the CI machine.
|
Increase perf test timeout for the CI machine.
|
Java
|
mit
|
lettas/couch4j
|
java
|
## Code Before:
package com.coravy.couch4j;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Run on the same machine
*
* <p>
* Check established connections:
* <pre>
* sudo netstat -tap tcp
* </pre>
*/
public class PerfTest {
@Test(timeout=3500)
public void fetchMultipleDocuments() throws Exception {
Database<Document> test = CouchDB.localServerInstance().getDatabase("couch4j");
final int UPPER = 1000;
long start = System.currentTimeMillis();
for (int i = 0; i <= UPPER; i++) {
Document d = test.fetchDocument(Couch4jTest.VALID_DOC_ID);
assertNotNull(d.getRev());
assertEquals(Couch4jTest.VALID_DOC_ID, d.getId());
}
long duration = System.currentTimeMillis() - start;
System.out.format("Fetching %d documents took %d ms", UPPER, duration);
}
}
## Instruction:
Increase perf test timeout for the CI machine.
## Code After:
package com.coravy.couch4j;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Run on the same machine
*
* <p>
* Check established connections:
* <pre>
* sudo netstat -tap tcp
* </pre>
*/
public class PerfTest {
@Test(timeout=7000) // ~ 2700 ms on a 3.06 Core 2 Duo
public void fetchMultipleDocuments() throws Exception {
Database<Document> test = CouchDB.localServerInstance().getDatabase("couch4j");
final int UPPER = 1000;
long start = System.currentTimeMillis();
for (int i = 0; i <= UPPER; i++) {
Document d = test.fetchDocument(Couch4jTest.VALID_DOC_ID);
assertNotNull(d.getRev());
assertEquals(Couch4jTest.VALID_DOC_ID, d.getId());
}
long duration = System.currentTimeMillis() - start;
System.out.format("Fetching %d documents took %d ms", UPPER, duration);
}
}
|
# ... existing code ...
*/
public class PerfTest {
@Test(timeout=7000) // ~ 2700 ms on a 3.06 Core 2 Duo
public void fetchMultipleDocuments() throws Exception {
Database<Document> test = CouchDB.localServerInstance().getDatabase("couch4j");
final int UPPER = 1000;
# ... rest of the code ...
|
14917a4d503569147277bfd5fefa4b2600dfea40
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
|
import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
|
Set xfailtags as first priority
|
Set xfailtags as first priority
|
Python
|
mit
|
dincamihai/salt-toaster,dincamihai/salt-toaster
|
python
|
## Code Before:
import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
## Instruction:
Set xfailtags as first priority
## Code After:
import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
|
// ... existing code ...
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
// ... rest of the code ...
|
3ddad0538430499182c583a0a7f877884038c0a5
|
Lib/test/test_symtable.py
|
Lib/test/test_symtable.py
|
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
from test import test_support
import symtable
import unittest
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
class SymtableTest(unittest.TestCase):
def test_invalid_args(self):
self.assertRaises(TypeError, symtable.symtable, "42")
self.assertRaises(ValueError, symtable.symtable, "42", "?", "")
def test_eval(self):
symbols = symtable.symtable("42", "?", "eval")
def test_single(self):
symbols = symtable.symtable("42", "?", "single")
def test_exec(self):
symbols = symtable.symtable("def f(x): return x", "?", "exec")
def test_main():
test_support.run_unittest(SymtableTest)
if __name__ == '__main__':
test_main()
|
Use unittest and make sure a few other cases don't crash
|
Use unittest and make sure a few other cases don't crash
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
python
|
## Code Before:
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
## Instruction:
Use unittest and make sure a few other cases don't crash
## Code After:
from test import test_support
import symtable
import unittest
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
class SymtableTest(unittest.TestCase):
def test_invalid_args(self):
self.assertRaises(TypeError, symtable.symtable, "42")
self.assertRaises(ValueError, symtable.symtable, "42", "?", "")
def test_eval(self):
symbols = symtable.symtable("42", "?", "eval")
def test_single(self):
symbols = symtable.symtable("42", "?", "single")
def test_exec(self):
symbols = symtable.symtable("def f(x): return x", "?", "exec")
def test_main():
test_support.run_unittest(SymtableTest)
if __name__ == '__main__':
test_main()
|
// ... existing code ...
from test import test_support
import symtable
import unittest
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
// ... modified code ...
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
class SymtableTest(unittest.TestCase):
def test_invalid_args(self):
self.assertRaises(TypeError, symtable.symtable, "42")
self.assertRaises(ValueError, symtable.symtable, "42", "?", "")
def test_eval(self):
symbols = symtable.symtable("42", "?", "eval")
def test_single(self):
symbols = symtable.symtable("42", "?", "single")
def test_exec(self):
symbols = symtable.symtable("def f(x): return x", "?", "exec")
def test_main():
test_support.run_unittest(SymtableTest)
if __name__ == '__main__':
test_main()
// ... rest of the code ...
|
8da30e95a9dcf419ac33cb372670bd2a2ddd4560
|
src/main/MatrixTransformation.java
|
src/main/MatrixTransformation.java
|
package main;
class MatrixTransformation {
static int[][] rotateTo90Degrees(int[][] matrix) {
return new int[][]{};
}
}
|
package main;
class MatrixTransformation {
static int[][] rotateTo90Degrees(int[][] matrix) {
int n = matrix.length;
if (n == 0 || n != matrix[0].length) return matrix;
for (int layer = 0; layer < n/2; layer++) {
rotateLayer(layer, matrix);
}
return matrix;
}
private static void rotateLayer(int firstLayer, int[][] matrix) {
int n = matrix.length;
int lastLayer = n - 1 - firstLayer;
for (int i = firstLayer; i < lastLayer; i++) {
rotateElement(firstLayer, i, lastLayer, matrix);
}
}
private static void rotateElement(int firstLayer, int currentLayer, int lastLayer, int[][] matrix) {
int offset = currentLayer - firstLayer;
int tmp = matrix[firstLayer][currentLayer];
matrix[firstLayer][currentLayer] = matrix[lastLayer - offset][firstLayer];
matrix[lastLayer - offset][firstLayer] = matrix[lastLayer][lastLayer - offset];
matrix[lastLayer][lastLayer - offset] = matrix[currentLayer][lastLayer];
matrix[currentLayer][lastLayer] = tmp;
}
}
|
Add solution for matrix rotation task. It takes O(n^2) time (cause implement element by element rotation) and constant space.
|
Add solution for matrix rotation task. It takes O(n^2) time (cause implement element by element rotation) and constant space.
|
Java
|
bsd-3-clause
|
mikhsol/crackingTheCodingInterviewJava
|
java
|
## Code Before:
package main;
class MatrixTransformation {
static int[][] rotateTo90Degrees(int[][] matrix) {
return new int[][]{};
}
}
## Instruction:
Add solution for matrix rotation task. It takes O(n^2) time (cause implement element by element rotation) and constant space.
## Code After:
package main;
class MatrixTransformation {
static int[][] rotateTo90Degrees(int[][] matrix) {
int n = matrix.length;
if (n == 0 || n != matrix[0].length) return matrix;
for (int layer = 0; layer < n/2; layer++) {
rotateLayer(layer, matrix);
}
return matrix;
}
private static void rotateLayer(int firstLayer, int[][] matrix) {
int n = matrix.length;
int lastLayer = n - 1 - firstLayer;
for (int i = firstLayer; i < lastLayer; i++) {
rotateElement(firstLayer, i, lastLayer, matrix);
}
}
private static void rotateElement(int firstLayer, int currentLayer, int lastLayer, int[][] matrix) {
int offset = currentLayer - firstLayer;
int tmp = matrix[firstLayer][currentLayer];
matrix[firstLayer][currentLayer] = matrix[lastLayer - offset][firstLayer];
matrix[lastLayer - offset][firstLayer] = matrix[lastLayer][lastLayer - offset];
matrix[lastLayer][lastLayer - offset] = matrix[currentLayer][lastLayer];
matrix[currentLayer][lastLayer] = tmp;
}
}
|
# ... existing code ...
class MatrixTransformation {
static int[][] rotateTo90Degrees(int[][] matrix) {
int n = matrix.length;
if (n == 0 || n != matrix[0].length) return matrix;
for (int layer = 0; layer < n/2; layer++) {
rotateLayer(layer, matrix);
}
return matrix;
}
private static void rotateLayer(int firstLayer, int[][] matrix) {
int n = matrix.length;
int lastLayer = n - 1 - firstLayer;
for (int i = firstLayer; i < lastLayer; i++) {
rotateElement(firstLayer, i, lastLayer, matrix);
}
}
private static void rotateElement(int firstLayer, int currentLayer, int lastLayer, int[][] matrix) {
int offset = currentLayer - firstLayer;
int tmp = matrix[firstLayer][currentLayer];
matrix[firstLayer][currentLayer] = matrix[lastLayer - offset][firstLayer];
matrix[lastLayer - offset][firstLayer] = matrix[lastLayer][lastLayer - offset];
matrix[lastLayer][lastLayer - offset] = matrix[currentLayer][lastLayer];
matrix[currentLayer][lastLayer] = tmp;
}
}
# ... rest of the code ...
|
1056c3f489b162d77b6c117fad2b45bfa06beee1
|
app/urls.py
|
app/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
url(r'^posts', 'app.views.posts', name='posts'),
url(r'^admin/', include(admin.site.urls))
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'),
url(r'^admin/', include(admin.site.urls))
)
|
Revert "Added a post view"
|
Revert "Added a post view"
This reverts commit b1063480e7b2e1128c457e9e65c52f742109d90d.
|
Python
|
unlicense
|
yourbuddyconner/cs399-social,yourbuddyconner/cs399-social
|
python
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
url(r'^posts', 'app.views.posts', name='posts'),
url(r'^admin/', include(admin.site.urls))
)
## Instruction:
Revert "Added a post view"
This reverts commit b1063480e7b2e1128c457e9e65c52f742109d90d.
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'),
url(r'^admin/', include(admin.site.urls))
)
|
...
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
...
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'),
url(r'^admin/', include(admin.site.urls))
)
...
|
029b9de65f189345ec8ef630757aa1f2e280007a
|
parity-top/src/main/java/org/jvirtanen/parity/top/MarketListener.java
|
parity-top/src/main/java/org/jvirtanen/parity/top/MarketListener.java
|
package org.jvirtanen.parity.top;
/**
* <code>MarketListener</code> is the interface for outbound events from the
* order book reconstruction.
*/
public interface MarketListener {
/**
* An event indicating that the best bid and offer (BBO) has changed.
*
* @param instrument the instrument
* @param bidPrice the bid price or zero if there are no bids
* @param bidSize the bid size or zero if there are no bids
* @param askPrice the ask price or zero if there are no asks
* @param askSize the ask size or zero if there are no asks
*/
void bbo(long instrument, long bidPrice, long bidSize, long askPrice, long askSize);
/**
* An event indicating that a trade has taken place.
*
* @param instrument the instrument
* @param side the side of the resting order
* @param price the trade price
* @param size the trade size
*/
void trade(long instrument, Side side, long price, long size);
}
|
package org.jvirtanen.parity.top;
/**
* The interface for outbound events from the order book reconstruction.
*/
public interface MarketListener {
/**
* An event indicating that the best bid and offer (BBO) has changed.
*
* @param instrument the instrument
* @param bidPrice the bid price or zero if there are no bids
* @param bidSize the bid size or zero if there are no bids
* @param askPrice the ask price or zero if there are no asks
* @param askSize the ask size or zero if there are no asks
*/
void bbo(long instrument, long bidPrice, long bidSize, long askPrice, long askSize);
/**
* An event indicating that a trade has taken place.
*
* @param instrument the instrument
* @param side the side of the resting order
* @param price the trade price
* @param size the trade size
*/
void trade(long instrument, Side side, long price, long size);
}
|
Tweak documentation for market listener in order book reconstruction
|
Tweak documentation for market listener in order book reconstruction
|
Java
|
apache-2.0
|
pmcs/parity,pmcs/parity,paritytrading/parity,paritytrading/parity
|
java
|
## Code Before:
package org.jvirtanen.parity.top;
/**
* <code>MarketListener</code> is the interface for outbound events from the
* order book reconstruction.
*/
public interface MarketListener {
/**
* An event indicating that the best bid and offer (BBO) has changed.
*
* @param instrument the instrument
* @param bidPrice the bid price or zero if there are no bids
* @param bidSize the bid size or zero if there are no bids
* @param askPrice the ask price or zero if there are no asks
* @param askSize the ask size or zero if there are no asks
*/
void bbo(long instrument, long bidPrice, long bidSize, long askPrice, long askSize);
/**
* An event indicating that a trade has taken place.
*
* @param instrument the instrument
* @param side the side of the resting order
* @param price the trade price
* @param size the trade size
*/
void trade(long instrument, Side side, long price, long size);
}
## Instruction:
Tweak documentation for market listener in order book reconstruction
## Code After:
package org.jvirtanen.parity.top;
/**
* The interface for outbound events from the order book reconstruction.
*/
public interface MarketListener {
/**
* An event indicating that the best bid and offer (BBO) has changed.
*
* @param instrument the instrument
* @param bidPrice the bid price or zero if there are no bids
* @param bidSize the bid size or zero if there are no bids
* @param askPrice the ask price or zero if there are no asks
* @param askSize the ask size or zero if there are no asks
*/
void bbo(long instrument, long bidPrice, long bidSize, long askPrice, long askSize);
/**
* An event indicating that a trade has taken place.
*
* @param instrument the instrument
* @param side the side of the resting order
* @param price the trade price
* @param size the trade size
*/
void trade(long instrument, Side side, long price, long size);
}
|
# ... existing code ...
package org.jvirtanen.parity.top;
/**
* The interface for outbound events from the order book reconstruction.
*/
public interface MarketListener {
# ... rest of the code ...
|
c49e05ca04116a78b2a960f3a05dce6319582a8f
|
c/ppb_find.h
|
c/ppb_find.h
|
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PPAPI_C_PPB_FIND_H_
#define PPAPI_C_PPB_FIND_H_
#include "ppapi/c/pp_instance.h"
#include "ppapi/c/pp_stdint.h"
#define PPB_FIND_INTERFACE "PPB_Find;1"
typedef struct _ppb_Find {
// Updates the number of find results for the current search term. If
// there are no matches 0 should be passed in. Only when the plugin has
// finished searching should it pass in the final count with finalResult set
// to true.
void NumberOfFindResultsChanged(PP_Instance instance,
int32_t total,
bool final_result);
// Updates the index of the currently selected search item.
void SelectedFindResultChanged(PP_Instance instance,
int32_t index);
} PPB_Find;
#endif // PPAPI_C_PPB_FIND_H_
|
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PPAPI_C_PPB_FIND_H_
#define PPAPI_C_PPB_FIND_H_
#include "ppapi/c/pp_instance.h"
#include "ppapi/c/pp_stdint.h"
#define PPB_FIND_INTERFACE "PPB_Find;1"
typedef struct _ppb_Find {
// Updates the number of find results for the current search term. If
// there are no matches 0 should be passed in. Only when the plugin has
// finished searching should it pass in the final count with finalResult set
// to true.
void (*NumberOfFindResultsChanged)(PP_Instance instance,
int32_t total,
bool final_result);
// Updates the index of the currently selected search item.
void (*SelectedFindResultChanged)(PP_Instance instance,
int32_t index);
} PPB_Find;
#endif // PPAPI_C_PPB_FIND_H_
|
Structure member should be function pointer
|
Structure member should be function pointer
BUG=none
TEST=compiles
Review URL: http://codereview.chromium.org/2972004
|
C
|
bsd-3-clause
|
tiaolong/ppapi,lag945/ppapi,nanox/ppapi,CharlesHuimin/ppapi,c1soju96/ppapi,qwop/ppapi,nanox/ppapi,siweilvxing/ppapi,siweilvxing/ppapi,xinghaizhou/ppapi,xiaozihui/ppapi,whitewolfm/ppapi,dingdayong/ppapi,xinghaizhou/ppapi,ruder/ppapi,fubaydullaev/ppapi,xuesongzhu/ppapi,xinghaizhou/ppapi,rise-worlds/ppapi,cacpssl/ppapi,thdtjsdn/ppapi,xiaozihui/ppapi,phisixersai/ppapi,chenfeng8742/ppapi,JustRight/ppapi,lag945/ppapi,xinghaizhou/ppapi,c1soju96/ppapi,HAfsari/ppapi,siweilvxing/ppapi,tonyjoule/ppapi,gwobay/ppapi,huochetou999/ppapi,stefanie924/ppapi,huochetou999/ppapi,YachaoLiu/ppapi,lag945/ppapi,ruder/ppapi,xiaozihui/ppapi,Xelemsta/ppapi,huochetou999/ppapi,cacpssl/ppapi,YachaoLiu/ppapi,thdtjsdn/ppapi,huqingyu/ppapi,dralves/ppapi,dingdayong/ppapi,nanox/ppapi,HAfsari/ppapi,fubaydullaev/ppapi,phisixersai/ppapi,xuesongzhu/ppapi,gwobay/ppapi,JustRight/ppapi,siweilvxing/ppapi,chenfeng8742/ppapi,fubaydullaev/ppapi,YachaoLiu/ppapi,lag945/ppapi,tonyjoule/ppapi,huqingyu/ppapi,huqingyu/ppapi,fubaydullaev/ppapi,qwop/ppapi,chenfeng8742/ppapi,Xelemsta/ppapi,cacpssl/ppapi,dingdayong/ppapi,rise-worlds/ppapi,gwobay/ppapi,dralves/ppapi,thdtjsdn/ppapi,tonyjoule/ppapi,ruder/ppapi,CharlesHuimin/ppapi,YachaoLiu/ppapi,tonyjoule/ppapi,CharlesHuimin/ppapi,JustRight/ppapi,dingdayong/ppapi,CharlesHuimin/ppapi,tiaolong/ppapi,c1soju96/ppapi,gwobay/ppapi,JustRight/ppapi,tonyjoule/ppapi,chenfeng8742/ppapi,xiaozihui/ppapi,rise-worlds/ppapi,xinghaizhou/ppapi,qwop/ppapi,whitewolfm/ppapi,CharlesHuimin/ppapi,phisixersai/ppapi,fubaydullaev/ppapi,tiaolong/ppapi,qwop/ppapi,xuesongzhu/ppapi,YachaoLiu/ppapi,thdtjsdn/ppapi,huochetou999/ppapi,lag945/ppapi,phisixersai/ppapi,HAfsari/ppapi,siweilvxing/ppapi,xuesongzhu/ppapi,dralves/ppapi,stefanie924/ppapi,tiaolong/ppapi,rise-worlds/ppapi,JustRight/ppapi,Xelemsta/ppapi,nanox/ppapi,phisixersai/ppapi,whitewolfm/ppapi,nanox/ppapi,dralves/ppapi,HAfsari/ppapi,qwop/ppapi,HAfsari/ppapi,gwobay/ppapi,stefanie924/ppapi,huochetou999/ppapi,chenfeng8742/ppapi,Xelemsta/ppapi,xuesongzhu/ppapi,cacpssl/ppapi,whitewolfm/ppapi,Xelemsta/ppapi,huqingyu/ppapi,huqingyu/ppapi,ruder/ppapi,ruder/ppapi,rise-worlds/ppapi,tiaolong/ppapi,whitewolfm/ppapi,dingdayong/ppapi,c1soju96/ppapi,c1soju96/ppapi,stefanie924/ppapi,thdtjsdn/ppapi,xiaozihui/ppapi,cacpssl/ppapi,stefanie924/ppapi,dralves/ppapi
|
c
|
## Code Before:
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PPAPI_C_PPB_FIND_H_
#define PPAPI_C_PPB_FIND_H_
#include "ppapi/c/pp_instance.h"
#include "ppapi/c/pp_stdint.h"
#define PPB_FIND_INTERFACE "PPB_Find;1"
typedef struct _ppb_Find {
// Updates the number of find results for the current search term. If
// there are no matches 0 should be passed in. Only when the plugin has
// finished searching should it pass in the final count with finalResult set
// to true.
void NumberOfFindResultsChanged(PP_Instance instance,
int32_t total,
bool final_result);
// Updates the index of the currently selected search item.
void SelectedFindResultChanged(PP_Instance instance,
int32_t index);
} PPB_Find;
#endif // PPAPI_C_PPB_FIND_H_
## Instruction:
Structure member should be function pointer
BUG=none
TEST=compiles
Review URL: http://codereview.chromium.org/2972004
## Code After:
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PPAPI_C_PPB_FIND_H_
#define PPAPI_C_PPB_FIND_H_
#include "ppapi/c/pp_instance.h"
#include "ppapi/c/pp_stdint.h"
#define PPB_FIND_INTERFACE "PPB_Find;1"
typedef struct _ppb_Find {
// Updates the number of find results for the current search term. If
// there are no matches 0 should be passed in. Only when the plugin has
// finished searching should it pass in the final count with finalResult set
// to true.
void (*NumberOfFindResultsChanged)(PP_Instance instance,
int32_t total,
bool final_result);
// Updates the index of the currently selected search item.
void (*SelectedFindResultChanged)(PP_Instance instance,
int32_t index);
} PPB_Find;
#endif // PPAPI_C_PPB_FIND_H_
|
# ... existing code ...
// there are no matches 0 should be passed in. Only when the plugin has
// finished searching should it pass in the final count with finalResult set
// to true.
void (*NumberOfFindResultsChanged)(PP_Instance instance,
int32_t total,
bool final_result);
// Updates the index of the currently selected search item.
void (*SelectedFindResultChanged)(PP_Instance instance,
int32_t index);
} PPB_Find;
# ... rest of the code ...
|
bc0193a3a32521512b77e6149e91eab805836f8d
|
django_dbq/management/commands/queue_depth.py
|
django_dbq/management/commands/queue_depth.py
|
from django.core.management.base import BaseCommand
from django_dbq.models import Job
class Command(BaseCommand):
help = "Print the current depth of the given queue"
def add_arguments(self, parser):
parser.add_argument("queue_name", nargs="*", default=["default"], type=str)
def handle(self, *args, **options):
queue_names = options["queue_name"]
queue_depths = Job.get_queue_depths()
self.stdout.write(
" ".join(
[
f"{queue_name}={queue_depths.get(queue_name, 0)}"
for queue_name in queue_names
]
)
)
|
from django.core.management.base import BaseCommand
from django_dbq.models import Job
class Command(BaseCommand):
help = "Print the current depth of the given queue"
def add_arguments(self, parser):
parser.add_argument("queue_name", nargs="*", default=["default"], type=str)
def handle(self, *args, **options):
queue_names = options["queue_name"]
queue_depths = Job.get_queue_depths()
self.stdout.write(
" ".join(
[
"{queue_name}={queue_depth}".format(
queue_name=queue_name,
queue_depth=queue_depths.get(queue_name, 0)
)
for queue_name in queue_names
]
)
)
|
Convert f-string to .format call
|
Convert f-string to .format call
|
Python
|
bsd-2-clause
|
dabapps/django-db-queue
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from django_dbq.models import Job
class Command(BaseCommand):
help = "Print the current depth of the given queue"
def add_arguments(self, parser):
parser.add_argument("queue_name", nargs="*", default=["default"], type=str)
def handle(self, *args, **options):
queue_names = options["queue_name"]
queue_depths = Job.get_queue_depths()
self.stdout.write(
" ".join(
[
f"{queue_name}={queue_depths.get(queue_name, 0)}"
for queue_name in queue_names
]
)
)
## Instruction:
Convert f-string to .format call
## Code After:
from django.core.management.base import BaseCommand
from django_dbq.models import Job
class Command(BaseCommand):
help = "Print the current depth of the given queue"
def add_arguments(self, parser):
parser.add_argument("queue_name", nargs="*", default=["default"], type=str)
def handle(self, *args, **options):
queue_names = options["queue_name"]
queue_depths = Job.get_queue_depths()
self.stdout.write(
" ".join(
[
"{queue_name}={queue_depth}".format(
queue_name=queue_name,
queue_depth=queue_depths.get(queue_name, 0)
)
for queue_name in queue_names
]
)
)
|
...
self.stdout.write(
" ".join(
[
"{queue_name}={queue_depth}".format(
queue_name=queue_name,
queue_depth=queue_depths.get(queue_name, 0)
)
for queue_name in queue_names
]
)
...
|
3e2c7ca2147b28403761cf57dad6d9173a28dc3d
|
docs/tasks.py
|
docs/tasks.py
|
import invoke
import livereload
import shutil
server = livereload.Server()
@invoke.task
def clean():
shutil.rmtree('./build')
@invoke.task(pre=[clean])
def build():
invoke.run('sphinx-build ./source ./build', pty=True)
@invoke.task(pre=[build])
def serve():
server.watch('./source/*', build)
server.watch('./source/**/*', build)
server.serve(
root='./build',
host='localhost',
liveport=35729,
port=8080
)
|
import invoke
import livereload
import shutil
server = livereload.Server()
@invoke.task
def clean():
shutil.rmtree('./build')
@invoke.task(pre=[clean])
def build():
invoke.run('sphinx-build ./source ./build', pty=True)
@invoke.task(pre=[build])
def serve():
server.watch('../reqon/', build)
server.watch('./source/', build)
server.serve(
root='./build',
host='localhost',
liveport=35729,
port=8080
)
|
Fix directory watching when serving the docs.
|
Fix directory watching when serving the docs.
|
Python
|
mit
|
dmpayton/reqon
|
python
|
## Code Before:
import invoke
import livereload
import shutil
server = livereload.Server()
@invoke.task
def clean():
shutil.rmtree('./build')
@invoke.task(pre=[clean])
def build():
invoke.run('sphinx-build ./source ./build', pty=True)
@invoke.task(pre=[build])
def serve():
server.watch('./source/*', build)
server.watch('./source/**/*', build)
server.serve(
root='./build',
host='localhost',
liveport=35729,
port=8080
)
## Instruction:
Fix directory watching when serving the docs.
## Code After:
import invoke
import livereload
import shutil
server = livereload.Server()
@invoke.task
def clean():
shutil.rmtree('./build')
@invoke.task(pre=[clean])
def build():
invoke.run('sphinx-build ./source ./build', pty=True)
@invoke.task(pre=[build])
def serve():
server.watch('../reqon/', build)
server.watch('./source/', build)
server.serve(
root='./build',
host='localhost',
liveport=35729,
port=8080
)
|
# ... existing code ...
@invoke.task(pre=[build])
def serve():
server.watch('../reqon/', build)
server.watch('./source/', build)
server.serve(
root='./build',
host='localhost',
# ... rest of the code ...
|
9fe573614e2f3ca9a6e738afb7f1af84b541092c
|
invertedindex.py
|
invertedindex.py
|
class InvertedIndex:
def __init__(self):
self.index = dict()
def add_mail(self, mail):
for key in ["simple_terms_body", "complexe_terms_body"]:
for terms in mail[key]:
if terms in self.index.keys():
self.index[terms].append((mail["name"], mail[key][terms]))
else:
self.index[terms] = list()
self.index[terms].append((mail["name"], mail[key][terms]))
|
class InvertedIndex:
def __init__(self):
self.index = dict()
def add_mail(self, mail):
for key in ["simple_terms_body", "complexe_terms_body"]:
for terms in mail[key]:
if terms in self.index.keys():
self.index[terms].append((mail["name"], mail[key][terms]))
else:
self.index[terms] = list()
self.index[terms].append((mail["name"], mail[key][terms]))
def terms(self):
for terms in self.index.keys():
yield terms
def get_terms(self):
return self.index.keys()
def file_counter(self, terms):
for val in self.index[terms]:
yield val
def get_file_counter(self, terms):
return self.index.values()
def file(self, terms):
for val in file_counter(terms):
yield val[0]
def counter(self, terms):
for val in file_counter(terms):
yield val[1]
|
Add some access function to inverted index
|
Add some access function to inverted index
|
Python
|
mit
|
Nedgang/adt_project
|
python
|
## Code Before:
class InvertedIndex:
def __init__(self):
self.index = dict()
def add_mail(self, mail):
for key in ["simple_terms_body", "complexe_terms_body"]:
for terms in mail[key]:
if terms in self.index.keys():
self.index[terms].append((mail["name"], mail[key][terms]))
else:
self.index[terms] = list()
self.index[terms].append((mail["name"], mail[key][terms]))
## Instruction:
Add some access function to inverted index
## Code After:
class InvertedIndex:
def __init__(self):
self.index = dict()
def add_mail(self, mail):
for key in ["simple_terms_body", "complexe_terms_body"]:
for terms in mail[key]:
if terms in self.index.keys():
self.index[terms].append((mail["name"], mail[key][terms]))
else:
self.index[terms] = list()
self.index[terms].append((mail["name"], mail[key][terms]))
def terms(self):
for terms in self.index.keys():
yield terms
def get_terms(self):
return self.index.keys()
def file_counter(self, terms):
for val in self.index[terms]:
yield val
def get_file_counter(self, terms):
return self.index.values()
def file(self, terms):
for val in file_counter(terms):
yield val[0]
def counter(self, terms):
for val in file_counter(terms):
yield val[1]
|
# ... existing code ...
self.index[terms] = list()
self.index[terms].append((mail["name"], mail[key][terms]))
def terms(self):
for terms in self.index.keys():
yield terms
def get_terms(self):
return self.index.keys()
def file_counter(self, terms):
for val in self.index[terms]:
yield val
def get_file_counter(self, terms):
return self.index.values()
def file(self, terms):
for val in file_counter(terms):
yield val[0]
def counter(self, terms):
for val in file_counter(terms):
yield val[1]
# ... rest of the code ...
|
0bdea433da15d70ce841edbffb9316085ca8a647
|
main.py
|
main.py
|
import numpy as np
def plot_elevation(avulsion):
import matplotlib.pyplot as plt
z = avulsion.get_value('land_surface__elevation')
plt.imshow(z, origin='lower', cmap='terrain')
plt.colorbar().ax.set_label('Elevation (m)')
plt.show()
def main():
import argparse
from avulsion_bmi import BmiRiverModule
parser = argparse.ArgumentParser('Run the avulsion model')
parser.add_argument('file', help='YAML-formatted parameters file')
parser.add_argument('--days', type=int, default=0,
help='Run model for DAYS')
parser.add_argument('--years', type=int, default=0,
help='Run model for YEARS')
parser.add_argument('--plot', action='store_true',
help='Plot final elevations')
args = parser.parse_args()
np.random.seed(1945)
avulsion = BmiRiverModule()
avulsion.initialize(args.file)
n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step())
for _ in xrange(n_steps):
avulsion.update()
if args.plot:
plot_elevation(avulsion)
avulsion.finalize()
if __name__ == '__main__':
main()
|
import sys
import numpy as np
def plot_elevation(avulsion):
import matplotlib.pyplot as plt
z = avulsion.get_value('land_surface__elevation')
plt.imshow(z, origin='lower', cmap='terrain')
plt.colorbar().ax.set_label('Elevation (m)')
plt.show()
def main():
import argparse
from avulsion_bmi import BmiRiverModule
parser = argparse.ArgumentParser('Run the avulsion model')
parser.add_argument('file', help='YAML-formatted parameters file')
parser.add_argument('--days', type=int, default=0,
help='Run model for DAYS')
parser.add_argument('--years', type=int, default=0,
help='Run model for YEARS')
parser.add_argument('--plot', action='store_true',
help='Plot final elevations')
args = parser.parse_args()
np.random.seed(1945)
avulsion = BmiRiverModule()
avulsion.initialize(args.file)
n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step())
for _ in xrange(n_steps):
avulsion.update()
if args.plot:
plot_elevation(avulsion)
z = avulsion.get_value('land_surface__elevation')
np.savetxt(sys.stdout, z)
avulsion.finalize()
if __name__ == '__main__':
main()
|
Print final surface elevations to stdout.
|
Print final surface elevations to stdout.
|
Python
|
mit
|
mcflugen/avulsion-bmi,katmratliff/avulsion-bmi
|
python
|
## Code Before:
import numpy as np
def plot_elevation(avulsion):
import matplotlib.pyplot as plt
z = avulsion.get_value('land_surface__elevation')
plt.imshow(z, origin='lower', cmap='terrain')
plt.colorbar().ax.set_label('Elevation (m)')
plt.show()
def main():
import argparse
from avulsion_bmi import BmiRiverModule
parser = argparse.ArgumentParser('Run the avulsion model')
parser.add_argument('file', help='YAML-formatted parameters file')
parser.add_argument('--days', type=int, default=0,
help='Run model for DAYS')
parser.add_argument('--years', type=int, default=0,
help='Run model for YEARS')
parser.add_argument('--plot', action='store_true',
help='Plot final elevations')
args = parser.parse_args()
np.random.seed(1945)
avulsion = BmiRiverModule()
avulsion.initialize(args.file)
n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step())
for _ in xrange(n_steps):
avulsion.update()
if args.plot:
plot_elevation(avulsion)
avulsion.finalize()
if __name__ == '__main__':
main()
## Instruction:
Print final surface elevations to stdout.
## Code After:
import sys
import numpy as np
def plot_elevation(avulsion):
import matplotlib.pyplot as plt
z = avulsion.get_value('land_surface__elevation')
plt.imshow(z, origin='lower', cmap='terrain')
plt.colorbar().ax.set_label('Elevation (m)')
plt.show()
def main():
import argparse
from avulsion_bmi import BmiRiverModule
parser = argparse.ArgumentParser('Run the avulsion model')
parser.add_argument('file', help='YAML-formatted parameters file')
parser.add_argument('--days', type=int, default=0,
help='Run model for DAYS')
parser.add_argument('--years', type=int, default=0,
help='Run model for YEARS')
parser.add_argument('--plot', action='store_true',
help='Plot final elevations')
args = parser.parse_args()
np.random.seed(1945)
avulsion = BmiRiverModule()
avulsion.initialize(args.file)
n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step())
for _ in xrange(n_steps):
avulsion.update()
if args.plot:
plot_elevation(avulsion)
z = avulsion.get_value('land_surface__elevation')
np.savetxt(sys.stdout, z)
avulsion.finalize()
if __name__ == '__main__':
main()
|
// ... existing code ...
import sys
import numpy as np
// ... modified code ...
if args.plot:
plot_elevation(avulsion)
z = avulsion.get_value('land_surface__elevation')
np.savetxt(sys.stdout, z)
avulsion.finalize()
// ... rest of the code ...
|
377a0d4acd9a578146c0f02f518dbea502c8461f
|
arividam/siteconfig/apps.py
|
arividam/siteconfig/apps.py
|
from __future__ import unicode_literals
from django.apps import AppConfig
class SiteconfigConfig(AppConfig):
name = 'siteconfig'
|
from __future__ import unicode_literals
from django.apps import AppConfig
from cms.cms_plugins import AliasPlugin
from cms.plugin_pool import plugin_pool
class SiteconfigConfig(AppConfig):
name = 'siteconfig'
verbose_name = "Site Configuration"
def ready(self):
def return_pass(self, r, p):
pass
AliasPlugin.get_extra_global_plugin_menu_items = return_pass
AliasPlugin.get_extra_placeholder_menu_items = return_pass
plugin_pool.unregister_plugin(AliasPlugin)
|
Disable AliasPlugin since it can accidentally
|
Disable AliasPlugin since it can accidentally
cause problems if an alias is placed within itself
|
Python
|
mit
|
c4sc/arividam,c4sc/arividam,c4sc/arividam,c4sc/arividam
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.apps import AppConfig
class SiteconfigConfig(AppConfig):
name = 'siteconfig'
## Instruction:
Disable AliasPlugin since it can accidentally
cause problems if an alias is placed within itself
## Code After:
from __future__ import unicode_literals
from django.apps import AppConfig
from cms.cms_plugins import AliasPlugin
from cms.plugin_pool import plugin_pool
class SiteconfigConfig(AppConfig):
name = 'siteconfig'
verbose_name = "Site Configuration"
def ready(self):
def return_pass(self, r, p):
pass
AliasPlugin.get_extra_global_plugin_menu_items = return_pass
AliasPlugin.get_extra_placeholder_menu_items = return_pass
plugin_pool.unregister_plugin(AliasPlugin)
|
# ... existing code ...
from __future__ import unicode_literals
from django.apps import AppConfig
from cms.cms_plugins import AliasPlugin
from cms.plugin_pool import plugin_pool
class SiteconfigConfig(AppConfig):
name = 'siteconfig'
verbose_name = "Site Configuration"
def ready(self):
def return_pass(self, r, p):
pass
AliasPlugin.get_extra_global_plugin_menu_items = return_pass
AliasPlugin.get_extra_placeholder_menu_items = return_pass
plugin_pool.unregister_plugin(AliasPlugin)
# ... rest of the code ...
|
a662eded2841b87ccbccdd6dfb21315725d0a0c5
|
python/pyspark_llap/__init__.py
|
python/pyspark_llap/__init__.py
|
from pyspark_llap.sql.session import HiveWarehouseSession
__all__ = ['HiveWarehouseSession']
|
from pyspark_llap.sql.session import HiveWarehouseSession
# These are aliases so that importing this module exposes those attributes below directly.
DATAFRAME_TO_STREAM = HiveWarehouseSession.DATAFRAME_TO_STREAM
HIVE_WAREHOUSE_CONNECTOR = HiveWarehouseSession.HIVE_WAREHOUSE_CONNECTOR
STREAM_TO_STREAM = HiveWarehouseSession.STREAM_TO_STREAM
__all__ = [
'HiveWarehouseSession',
'DATAFRAME_TO_STREAM',
'HIVE_WAREHOUSE_CONNECTOR',
'STREAM_TO_STREAM',
]
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Python
|
apache-2.0
|
hortonworks-spark/spark-llap,hortonworks-spark/spark-llap,hortonworks-spark/spark-llap
|
python
|
## Code Before:
from pyspark_llap.sql.session import HiveWarehouseSession
__all__ = ['HiveWarehouseSession']
## Instruction:
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
## Code After:
from pyspark_llap.sql.session import HiveWarehouseSession
# These are aliases so that importing this module exposes those attributes below directly.
DATAFRAME_TO_STREAM = HiveWarehouseSession.DATAFRAME_TO_STREAM
HIVE_WAREHOUSE_CONNECTOR = HiveWarehouseSession.HIVE_WAREHOUSE_CONNECTOR
STREAM_TO_STREAM = HiveWarehouseSession.STREAM_TO_STREAM
__all__ = [
'HiveWarehouseSession',
'DATAFRAME_TO_STREAM',
'HIVE_WAREHOUSE_CONNECTOR',
'STREAM_TO_STREAM',
]
|
// ... existing code ...
from pyspark_llap.sql.session import HiveWarehouseSession
# These are aliases so that importing this module exposes those attributes below directly.
DATAFRAME_TO_STREAM = HiveWarehouseSession.DATAFRAME_TO_STREAM
HIVE_WAREHOUSE_CONNECTOR = HiveWarehouseSession.HIVE_WAREHOUSE_CONNECTOR
STREAM_TO_STREAM = HiveWarehouseSession.STREAM_TO_STREAM
__all__ = [
'HiveWarehouseSession',
'DATAFRAME_TO_STREAM',
'HIVE_WAREHOUSE_CONNECTOR',
'STREAM_TO_STREAM',
]
// ... rest of the code ...
|
5954196d3c81083f7f94eca147fe1a76a6dfb301
|
vc_vidyo/indico_vc_vidyo/blueprint.py
|
vc_vidyo/indico_vc_vidyo/blueprint.py
|
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
blueprint.add_url_rule('/event/<confId>/manage/videoconference/vidyo/<int:event_vc_room_id>/room-owner/',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',), defaults={'service': 'vidyo'})
|
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
|
Fix "make me room owner"
|
VC/Vidyo: Fix "make me room owner"
|
Python
|
mit
|
ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,indico/indico-plugins,indico/indico-plugins
|
python
|
## Code Before:
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
blueprint.add_url_rule('/event/<confId>/manage/videoconference/vidyo/<int:event_vc_room_id>/room-owner/',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',), defaults={'service': 'vidyo'})
## Instruction:
VC/Vidyo: Fix "make me room owner"
## Code After:
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_vc_vidyo.controllers import RHVidyoRoomOwner
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
|
...
blueprint = IndicoPluginBlueprint('vc_vidyo', 'indico_vc_vidyo')
# Room management
# using any(vidyo) instead of defaults since the event vc room locator
# includes the service and normalization skips values provided in 'defaults'
blueprint.add_url_rule('/event/<confId>/manage/videoconference/<any(vidyo):service>/<int:event_vc_room_id>/room-owner',
'set_room_owner', RHVidyoRoomOwner, methods=('POST',))
...
|
8a534a9927ac0050b3182243c2b8bbf59127549e
|
test/multiple_invocations_test.py
|
test/multiple_invocations_test.py
|
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
|
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
|
Test two flow invocations after each other
|
Test two flow invocations after each other
|
Python
|
bsd-3-clause
|
lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow
|
python
|
## Code Before:
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
## Instruction:
Test two flow invocations after each other
## Code After:
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
|
// ... existing code ...
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
// ... modified code ...
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
// ... rest of the code ...
|
6e1befa9021494f5a63ccf2943570765d5b4c6e6
|
SessionManager.py
|
SessionManager.py
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
def error_message(errno):
sublime.error_message(messages.error(errno))
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
nameformat = settings.get('session_name_dateformat')
return datetime.now().strftime(nameformat)
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
try:
serialize.dump(session_name, session)
except OSError as e:
error_message(e.errno)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
def error_message(errno):
sublime.error_message(messages.error(errno))
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
nameformat = settings.get('session_name_dateformat')
return datetime.now().strftime(nameformat)
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
try:
serialize.dump(session_name, session)
except OSError as e:
error_message(e.errno)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if self.is_saveable(window):
return True
return False
@staticmethod
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
Make "is_saveable" a staticmethod of SaveSession
|
Make "is_saveable" a staticmethod of SaveSession
|
Python
|
mit
|
Zeeker/sublime-SessionManager
|
python
|
## Code Before:
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
def error_message(errno):
sublime.error_message(messages.error(errno))
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
nameformat = settings.get('session_name_dateformat')
return datetime.now().strftime(nameformat)
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
try:
serialize.dump(session_name, session)
except OSError as e:
error_message(e.errno)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
## Instruction:
Make "is_saveable" a staticmethod of SaveSession
## Code After:
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
def error_message(errno):
sublime.error_message(messages.error(errno))
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
nameformat = settings.get('session_name_dateformat')
return datetime.now().strftime(nameformat)
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
try:
serialize.dump(session_name, session)
except OSError as e:
error_message(e.errno)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if self.is_saveable(window):
return True
return False
@staticmethod
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
...
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if self.is_saveable(window):
return True
return False
@staticmethod
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
...
|
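The record above turns a module-level helper into a @staticmethod so it can be reached through self. A minimal, self-contained sketch of that pattern; the names SessionSaver and has_saveable are invented for illustration and are not part of the plugin:

class SessionSaver:
    def has_saveable(self, windows):
        # An instance method can reach the helper through self even though
        # the helper itself needs no instance state.
        return any(self.is_saveable(w) for w in windows)

    @staticmethod
    def is_saveable(window):
        # No self parameter: the function is only namespaced on the class.
        return bool(window)

print(SessionSaver().has_saveable([[], ["view"]]))  # True
print(SessionSaver.is_saveable([]))                  # False, callable without an instance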
d97612dd4b37b4bb4ced0b72bd93d72b9112e293
|
src/main/java/com/hubspot/jinjava/lib/exptest/IsSameAsExpTest.java
|
src/main/java/com/hubspot/jinjava/lib/exptest/IsSameAsExpTest.java
|
package com.hubspot.jinjava.lib.exptest;
import com.hubspot.jinjava.doc.annotations.JinjavaDoc;
import com.hubspot.jinjava.doc.annotations.JinjavaParam;
import com.hubspot.jinjava.doc.annotations.JinjavaSnippet;
import com.hubspot.jinjava.interpret.InterpretException;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
@JinjavaDoc(value="Return true if variable is pointing at same object as other variable",
params=@JinjavaParam(value="other", type="object", desc="A second object to check the variables value against"),
snippets={
@JinjavaSnippet(
code="{% if var_one is sameas var_two %}\n" +
"<!--code to render if variables have the same value-->\n" +
"{% endif %}"),
}
)
public class IsSameAsExpTest implements ExpTest {
@Override
public String getName() {
return "sameas";
}
@Override
public boolean evaluate(Object var, JinjavaInterpreter interpreter,
Object... args) {
if(args.length == 0) {
throw new InterpretException(getName() + " test requires 1 argument");
}
return var == args[0];
}
}
|
package com.hubspot.jinjava.lib.exptest;
import com.hubspot.jinjava.doc.annotations.JinjavaDoc;
import com.hubspot.jinjava.doc.annotations.JinjavaParam;
import com.hubspot.jinjava.doc.annotations.JinjavaSnippet;
import com.hubspot.jinjava.interpret.InterpretException;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
@JinjavaDoc(value="Return true if variable is pointing at same object as other variable",
params=@JinjavaParam(value="other", type="object", desc="A second object to check the variables value against"),
snippets={
@JinjavaSnippet(
code="{% if var_one is sameas var_two %}\n" +
"<!--code to render if variables have the same value as one another-->\n" +
"{% endif %}"),
}
)
public class IsSameAsExpTest implements ExpTest {
@Override
public String getName() {
return "sameas";
}
@Override
public boolean evaluate(Object var, JinjavaInterpreter interpreter,
Object... args) {
if(args.length == 0) {
throw new InterpretException(getName() + " test requires 1 argument");
}
return var == args[0];
}
}
|
Convert more tabs to spaces to resolve errors
|
Convert more tabs to spaces to resolve errors
|
Java
|
apache-2.0
|
HubSpot/jinjava,HubSpot/jinjava,Mogztter/jinjava,Mogztter/jinjava,jaredstehler/jinjava,jaredstehler/jinjava
|
java
|
## Code Before:
package com.hubspot.jinjava.lib.exptest;
import com.hubspot.jinjava.doc.annotations.JinjavaDoc;
import com.hubspot.jinjava.doc.annotations.JinjavaParam;
import com.hubspot.jinjava.doc.annotations.JinjavaSnippet;
import com.hubspot.jinjava.interpret.InterpretException;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
@JinjavaDoc(value="Return true if variable is pointing at same object as other variable",
params=@JinjavaParam(value="other", type="object", desc="A second object to check the variables value against"),
snippets={
@JinjavaSnippet(
code="{% if var_one is sameas var_two %}\n" +
"<!--code to render if variables have the same value-->\n" +
"{% endif %}"),
}
)
public class IsSameAsExpTest implements ExpTest {
@Override
public String getName() {
return "sameas";
}
@Override
public boolean evaluate(Object var, JinjavaInterpreter interpreter,
Object... args) {
if(args.length == 0) {
throw new InterpretException(getName() + " test requires 1 argument");
}
return var == args[0];
}
}
## Instruction:
Convert more tabs to spaces to resolve errors
## Code After:
package com.hubspot.jinjava.lib.exptest;
import com.hubspot.jinjava.doc.annotations.JinjavaDoc;
import com.hubspot.jinjava.doc.annotations.JinjavaParam;
import com.hubspot.jinjava.doc.annotations.JinjavaSnippet;
import com.hubspot.jinjava.interpret.InterpretException;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
@JinjavaDoc(value="Return true if variable is pointing at same object as other variable",
params=@JinjavaParam(value="other", type="object", desc="A second object to check the variables value against"),
snippets={
@JinjavaSnippet(
code="{% if var_one is sameas var_two %}\n" +
"<!--code to render if variables have the same value as one another-->\n" +
"{% endif %}"),
}
)
public class IsSameAsExpTest implements ExpTest {
@Override
public String getName() {
return "sameas";
}
@Override
public boolean evaluate(Object var, JinjavaInterpreter interpreter,
Object... args) {
if(args.length == 0) {
throw new InterpretException(getName() + " test requires 1 argument");
}
return var == args[0];
}
}
|
# ... existing code ...
@JinjavaDoc(value="Return true if variable is pointing at same object as other variable",
params=@JinjavaParam(value="other", type="object", desc="A second object to check the variables value against"),
snippets={
@JinjavaSnippet(
code="{% if var_one is sameas var_two %}\n" +
"<!--code to render if variables have the same value as one another-->\n" +
"{% endif %}"),
}
)
# ... rest of the code ...
|
1f697a2c7bcf0f7769a9fc4f81be676ed5ee97c6
|
examples/flask/flask_seguro/cart.py
|
examples/flask/flask_seguro/cart.py
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
Fix dangerous default mutable value
|
Fix dangerous default mutable value
|
Python
|
mit
|
rgcarrasqueira/python-pagseguro,vintasoftware/python-pagseguro,rochacbruno/python-pagseguro
|
python
|
## Code Before:
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
## Instruction:
Fix dangerous default mutable value
## Code After:
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
# ... existing code ...
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
# ... rest of the code ...
|
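The record above removes a mutable default argument. A short sketch of why that default is dangerous; the function names buggy and fixed are illustrative only:

def buggy(item, bucket=[]):
    # The list is created once, when the function is defined, and is shared
    # by every call that relies on the default.
    bucket.append(item)
    return bucket

print(buggy(1))  # [1]
print(buggy(2))  # [1, 2] - state leaked from the first call

def fixed(item, bucket=None):
    bucket = bucket if bucket is not None else []  # fresh list per call
    bucket.append(item)
    return bucket

print(fixed(1))  # [1]
print(fixed(2))  # [2]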
87abaffc4ee3c5a694657821e55d66f7520e66cd
|
src/shake.h
|
src/shake.h
|
struct shakeDev;
/* libShake functions */
int shakeInit();
void shakeQuit();
void shakeListDevices();
int shakeNumOfDevices();
shakeDev *shakeOpen(unsigned int id);
void shakeClose(shakeDev *dev);
int shakeQuery(shakeDev *dev);
void shakeSetGain(shakeDev *dev, int gain);
void shakeInitEffect(shakeEffect *effect, shakeEffectType type);
int shakeUploadEffect(shakeDev *dev, shakeEffect effect);
void shakeEraseEffect(shakeDev *dev, int id);
void shakePlay(shakeDev *dev, int id);
void shakeStop(shakeDev *dev, int id);
#endif /* _SHAKE_H_ */
|
extern "C" {
#endif
#include "shake_private.h"
struct shakeDev;
/* libShake functions */
int shakeInit();
void shakeQuit();
void shakeListDevices();
int shakeNumOfDevices();
shakeDev *shakeOpen(unsigned int id);
void shakeClose(shakeDev *dev);
int shakeQuery(shakeDev *dev);
void shakeSetGain(shakeDev *dev, int gain);
void shakeInitEffect(shakeEffect *effect, shakeEffectType type);
int shakeUploadEffect(shakeDev *dev, shakeEffect effect);
void shakeEraseEffect(shakeDev *dev, int id);
void shakePlay(shakeDev *dev, int id);
void shakeStop(shakeDev *dev, int id);
#ifdef __cplusplus
}
#endif
#endif /* _SHAKE_H_ */
|
Use C calling convention when header is included by C++ code
|
Use C calling convention when header is included by C++ code
|
C
|
mit
|
zear/libShake,ShadowApex/libShake,ShadowApex/libShake,zear/libShake
|
c
|
## Code Before:
struct shakeDev;
/* libShake functions */
int shakeInit();
void shakeQuit();
void shakeListDevices();
int shakeNumOfDevices();
shakeDev *shakeOpen(unsigned int id);
void shakeClose(shakeDev *dev);
int shakeQuery(shakeDev *dev);
void shakeSetGain(shakeDev *dev, int gain);
void shakeInitEffect(shakeEffect *effect, shakeEffectType type);
int shakeUploadEffect(shakeDev *dev, shakeEffect effect);
void shakeEraseEffect(shakeDev *dev, int id);
void shakePlay(shakeDev *dev, int id);
void shakeStop(shakeDev *dev, int id);
#endif /* _SHAKE_H_ */
## Instruction:
Use C calling convention when header is included by C++ code
## Code After:
extern "C" {
#endif
#include "shake_private.h"
struct shakeDev;
/* libShake functions */
int shakeInit();
void shakeQuit();
void shakeListDevices();
int shakeNumOfDevices();
shakeDev *shakeOpen(unsigned int id);
void shakeClose(shakeDev *dev);
int shakeQuery(shakeDev *dev);
void shakeSetGain(shakeDev *dev, int gain);
void shakeInitEffect(shakeEffect *effect, shakeEffectType type);
int shakeUploadEffect(shakeDev *dev, shakeEffect effect);
void shakeEraseEffect(shakeDev *dev, int id);
void shakePlay(shakeDev *dev, int id);
void shakeStop(shakeDev *dev, int id);
#ifdef __cplusplus
}
#endif
#endif /* _SHAKE_H_ */
|
// ... existing code ...
extern "C" {
#endif
#include "shake_private.h"
struct shakeDev;
// ... modified code ...
void shakePlay(shakeDev *dev, int id);
void shakeStop(shakeDev *dev, int id);
#ifdef __cplusplus
}
#endif
#endif /* _SHAKE_H_ */
// ... rest of the code ...
|
04416cd9652a9fdc3ab58664ab4b96cbaff3f698
|
simuvex/s_event.py
|
simuvex/s_event.py
|
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
|
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
|
Python
|
bsd-2-clause
|
axt/angr,schieb/angr,angr/angr,tyb0807/angr,f-prettyland/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/angr,f-prettyland/angr,angr/angr,axt/angr,tyb0807/angr,iamahuman/angr,iamahuman/angr,chubbymaggie/angr,angr/simuvex,schieb/angr,iamahuman/angr,axt/angr,angr/angr,f-prettyland/angr,schieb/angr
|
python
|
## Code Before:
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
## Instruction:
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
## Code After:
import itertools
event_id_count = itertools.count()
class SimEvent(object):
#def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None):
def __init__(self, state, event_type, **kwargs):
self.id = event_id_count.next()
self.type = event_type
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys())
def _copy_event(self):
c = self.__class__.__new__(self.__class__)
c.id = self.id
c.type = self.type
c.bbl_addr = self.bbl_addr
c.stmt_idx = self.stmt_idx
c.sim_procedure = self.sim_procedure
c.objects = dict(self.objects)
return c
|
...
self.ins_addr = state.scratch.ins_addr
self.bbl_addr = state.scratch.bbl_addr
self.stmt_idx = state.scratch.stmt_idx
self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__
self.objects = dict(kwargs)
def __repr__(self):
...
|
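The record above keeps None instead of storing NoneType (the result of taking .__class__ of None), so the attribute stays trivially picklable. A hedged sketch of that guard; DummyProcedure is a made-up stand-in, and whether pickling type(None) itself fails depends on the interpreter version, so only the defensive pattern is shown:

import pickle

class DummyProcedure:
    pass

def procedure_class(sim_procedure):
    # Keep None as-is; only take the class when there is a real object.
    return None if sim_procedure is None else sim_procedure.__class__

print(pickle.loads(pickle.dumps(procedure_class(DummyProcedure()))))  # the class, pickled by reference
print(pickle.loads(pickle.dumps(procedure_class(None))))              # None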
34fda0b20a87b94d7413054bfcfc81dad0ecde19
|
utils/get_message.py
|
utils/get_message.py
|
import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
|
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
Revert "Remove redundant functions (one too many levels of abstraction)@"
|
Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.
|
Python
|
mit
|
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
|
python
|
## Code Before:
import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
## Instruction:
Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.
## Code After:
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
...
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
...
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
...
|
2618ba5aee11274f3714d72af94f9d8f0fc562cd
|
cas-server-core-configuration/src/main/java/org/apereo/cas/configuration/model/core/web/security/AdminPagesSecurityProperties.java
|
cas-server-core-configuration/src/main/java/org/apereo/cas/configuration/model/core/web/security/AdminPagesSecurityProperties.java
|
package org.apereo.cas.configuration.model.core.web.security;
import org.springframework.core.io.Resource;
/**
* This is {@link AdminPagesSecurityProperties}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
public class AdminPagesSecurityProperties {
private String ip = "127\\.0\\.0\\.1";
private String adminRoles = "ROLE_ADMIN";
private String loginUrl;
private String service;
private Resource users;
private boolean actuatorEndpointsEnabled;
public boolean isActuatorEndpointsEnabled() {
return actuatorEndpointsEnabled;
}
public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) {
this.actuatorEndpointsEnabled = actuatorEndpointsEnabled;
}
public String getIp() {
return ip;
}
public void setIp(final String ip) {
this.ip = ip;
}
public String getAdminRoles() {
return adminRoles;
}
public void setAdminRoles(final String adminRoles) {
this.adminRoles = adminRoles;
}
public String getLoginUrl() {
return loginUrl;
}
public void setLoginUrl(final String loginUrl) {
this.loginUrl = loginUrl;
}
public String getService() {
return service;
}
public void setService(final String service) {
this.service = service;
}
public Resource getUsers() {
return users;
}
public void setUsers(final Resource users) {
this.users = users;
}
}
|
package org.apereo.cas.configuration.model.core.web.security;
import org.springframework.core.io.Resource;
/**
* This is {@link AdminPagesSecurityProperties}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
public class AdminPagesSecurityProperties {
private String ip = "127\\.0\\.0\\.1|0:0:0:0:0:0:0:1";
private String adminRoles = "ROLE_ADMIN";
private String loginUrl;
private String service;
private Resource users;
private boolean actuatorEndpointsEnabled;
public boolean isActuatorEndpointsEnabled() {
return actuatorEndpointsEnabled;
}
public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) {
this.actuatorEndpointsEnabled = actuatorEndpointsEnabled;
}
public String getIp() {
return ip;
}
public void setIp(final String ip) {
this.ip = ip;
}
public String getAdminRoles() {
return adminRoles;
}
public void setAdminRoles(final String adminRoles) {
this.adminRoles = adminRoles;
}
public String getLoginUrl() {
return loginUrl;
}
public void setLoginUrl(final String loginUrl) {
this.loginUrl = loginUrl;
}
public String getService() {
return service;
}
public void setService(final String service) {
this.service = service;
}
public Resource getUsers() {
return users;
}
public void setUsers(final Resource users) {
this.users = users;
}
}
|
Support ipv6 for status endpoint security
|
Support ipv6 for status endpoint security
|
Java
|
apache-2.0
|
leleuj/cas,philliprower/cas,fogbeam/cas_mirror,philliprower/cas,apereo/cas,apereo/cas,pdrados/cas,Jasig/cas,rkorn86/cas,leleuj/cas,pdrados/cas,leleuj/cas,philliprower/cas,apereo/cas,Jasig/cas,Jasig/cas,philliprower/cas,rkorn86/cas,fogbeam/cas_mirror,leleuj/cas,fogbeam/cas_mirror,pdrados/cas,rkorn86/cas,fogbeam/cas_mirror,Jasig/cas,pdrados/cas,rkorn86/cas,apereo/cas,pdrados/cas,apereo/cas,apereo/cas,pdrados/cas,leleuj/cas,philliprower/cas,apereo/cas,philliprower/cas,leleuj/cas,fogbeam/cas_mirror,fogbeam/cas_mirror,philliprower/cas
|
java
|
## Code Before:
package org.apereo.cas.configuration.model.core.web.security;
import org.springframework.core.io.Resource;
/**
* This is {@link AdminPagesSecurityProperties}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
public class AdminPagesSecurityProperties {
private String ip = "127\\.0\\.0\\.1";
private String adminRoles = "ROLE_ADMIN";
private String loginUrl;
private String service;
private Resource users;
private boolean actuatorEndpointsEnabled;
public boolean isActuatorEndpointsEnabled() {
return actuatorEndpointsEnabled;
}
public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) {
this.actuatorEndpointsEnabled = actuatorEndpointsEnabled;
}
public String getIp() {
return ip;
}
public void setIp(final String ip) {
this.ip = ip;
}
public String getAdminRoles() {
return adminRoles;
}
public void setAdminRoles(final String adminRoles) {
this.adminRoles = adminRoles;
}
public String getLoginUrl() {
return loginUrl;
}
public void setLoginUrl(final String loginUrl) {
this.loginUrl = loginUrl;
}
public String getService() {
return service;
}
public void setService(final String service) {
this.service = service;
}
public Resource getUsers() {
return users;
}
public void setUsers(final Resource users) {
this.users = users;
}
}
## Instruction:
Support ipv6 for status endpoint security
## Code After:
package org.apereo.cas.configuration.model.core.web.security;
import org.springframework.core.io.Resource;
/**
* This is {@link AdminPagesSecurityProperties}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
public class AdminPagesSecurityProperties {
private String ip = "127\\.0\\.0\\.1|0:0:0:0:0:0:0:1";
private String adminRoles = "ROLE_ADMIN";
private String loginUrl;
private String service;
private Resource users;
private boolean actuatorEndpointsEnabled;
public boolean isActuatorEndpointsEnabled() {
return actuatorEndpointsEnabled;
}
public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) {
this.actuatorEndpointsEnabled = actuatorEndpointsEnabled;
}
public String getIp() {
return ip;
}
public void setIp(final String ip) {
this.ip = ip;
}
public String getAdminRoles() {
return adminRoles;
}
public void setAdminRoles(final String adminRoles) {
this.adminRoles = adminRoles;
}
public String getLoginUrl() {
return loginUrl;
}
public void setLoginUrl(final String loginUrl) {
this.loginUrl = loginUrl;
}
public String getService() {
return service;
}
public void setService(final String service) {
this.service = service;
}
public Resource getUsers() {
return users;
}
public void setUsers(final Resource users) {
this.users = users;
}
}
|
...
public class AdminPagesSecurityProperties {
private String ip = "127\\.0\\.0\\.1|0:0:0:0:0:0:0:1";
private String adminRoles = "ROLE_ADMIN";
private String loginUrl;
private String service;
...
|
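The record above widens the default ip pattern to also cover the unabbreviated IPv6 loopback address. A small sketch of what that regular expression accepts, written in Python for brevity; how CAS itself applies the pattern to the client address is not part of the sketch:

import re

LOOPBACK = re.compile(r"127\.0\.0\.1|0:0:0:0:0:0:0:1")

for addr in ("127.0.0.1", "0:0:0:0:0:0:0:1", "::1", "10.0.0.1"):
    print(addr, bool(LOOPBACK.fullmatch(addr)))
# The abbreviated form ::1 is not covered and would need its own alternative.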
197c109ed2cb93268cba8c02066ac3df8812cd95
|
src/main/java/hello/Application.java
|
src/main/java/hello/Application.java
|
package hello;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
@ComponentScan
@EnableAutoConfiguration
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
package hello;
import org.apache.catalina.session.FileStore;
import org.apache.catalina.session.PersistentManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import java.io.File;
import java.util.Arrays;
@Configuration
@ComponentScan
@EnableAutoConfiguration
public class Application {
private Log log = LogFactory.getLog(Application.class);
@Bean
public EmbeddedServletContainerCustomizer containerCustomizer() {
return factory -> {
TomcatEmbeddedServletContainerFactory containerFactory = (TomcatEmbeddedServletContainerFactory) factory;
containerFactory.setTomcatContextCustomizers(Arrays.asList(context -> {
final PersistentManager persistentManager = new PersistentManager();
final FileStore store = new FileStore();
final String sessionDirectory = makeSessionDirectory();
log.info("Writing sessions to " + sessionDirectory);
store.setDirectory(sessionDirectory);
persistentManager.setStore(store);
context.setManager(persistentManager);
}));
};
}
private String makeSessionDirectory() {
final String cwd = System.getProperty("user.dir");
return cwd + File.separator + "sessions";
}
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
Use persistent file based sessions that survive application restarts.
|
Use persistent file based sessions that survive application restarts.
|
Java
|
apache-2.0
|
gsopu8065/spring-boot-spring-loaded-java8-example,joakim666/spring-boot-spring-loaded-java8-example,joakim666/spring-boot-spring-loaded-java8-example,gsopu8065/spring-boot-spring-loaded-java8-example
|
java
|
## Code Before:
package hello;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
@ComponentScan
@EnableAutoConfiguration
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
## Instruction:
Use persistent file based sessions that survive application restarts.
## Code After:
package hello;
import org.apache.catalina.session.FileStore;
import org.apache.catalina.session.PersistentManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import java.io.File;
import java.util.Arrays;
@Configuration
@ComponentScan
@EnableAutoConfiguration
public class Application {
private Log log = LogFactory.getLog(Application.class);
@Bean
public EmbeddedServletContainerCustomizer containerCustomizer() {
return factory -> {
TomcatEmbeddedServletContainerFactory containerFactory = (TomcatEmbeddedServletContainerFactory) factory;
containerFactory.setTomcatContextCustomizers(Arrays.asList(context -> {
final PersistentManager persistentManager = new PersistentManager();
final FileStore store = new FileStore();
final String sessionDirectory = makeSessionDirectory();
log.info("Writing sessions to " + sessionDirectory);
store.setDirectory(sessionDirectory);
persistentManager.setStore(store);
context.setManager(persistentManager);
}));
};
}
private String makeSessionDirectory() {
final String cwd = System.getProperty("user.dir");
return cwd + File.separator + "sessions";
}
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
|
// ... existing code ...
package hello;
import org.apache.catalina.session.FileStore;
import org.apache.catalina.session.PersistentManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import java.io.File;
import java.util.Arrays;
@Configuration
@ComponentScan
@EnableAutoConfiguration
public class Application {
private Log log = LogFactory.getLog(Application.class);
@Bean
public EmbeddedServletContainerCustomizer containerCustomizer() {
return factory -> {
TomcatEmbeddedServletContainerFactory containerFactory = (TomcatEmbeddedServletContainerFactory) factory;
containerFactory.setTomcatContextCustomizers(Arrays.asList(context -> {
final PersistentManager persistentManager = new PersistentManager();
final FileStore store = new FileStore();
final String sessionDirectory = makeSessionDirectory();
log.info("Writing sessions to " + sessionDirectory);
store.setDirectory(sessionDirectory);
persistentManager.setStore(store);
context.setManager(persistentManager);
}));
};
}
private String makeSessionDirectory() {
final String cwd = System.getProperty("user.dir");
return cwd + File.separator + "sessions";
}
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
// ... rest of the code ...
|
4802f41e8d012781c5b21f8fef43007c6b529e5e
|
Magic/src/main/java/com/elmakers/mine/bukkit/action/CheckAction.java
|
Magic/src/main/java/com/elmakers/mine/bukkit/action/CheckAction.java
|
package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.ActionHandler;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
public abstract class CheckAction extends CompoundAction {
protected abstract boolean isAllowed(CastContext context);
@Override
public SpellResult step(CastContext context) {
boolean allowed = isAllowed(context);
ActionHandler actions = getHandler("actions");
if (actions == null || actions.size() == 0) {
return allowed ? SpellResult.CAST : SpellResult.STOP;
}
if (!allowed) {
return SpellResult.NO_TARGET;
}
return startActions();
}
}
|
package com.elmakers.mine.bukkit.action;
import org.bukkit.configuration.ConfigurationSection;
import com.elmakers.mine.bukkit.api.action.ActionHandler;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
public abstract class CheckAction extends CompoundAction {
protected abstract boolean isAllowed(CastContext context);
@Override
protected void addHandlers(Spell spell, ConfigurationSection parameters) {
addHandler(spell, "actions");
addHandler(spell, "fail");
}
@Override
public SpellResult step(CastContext context) {
boolean allowed = isAllowed(context);
if (!allowed) {
ActionHandler fail = getHandler("fail");
if (fail != null && fail.size() != 0) {
return startActions("fail");
}
}
ActionHandler actions = getHandler("actions");
if (actions == null || actions.size() == 0) {
return allowed ? SpellResult.CAST : SpellResult.STOP;
}
if (!allowed) {
return SpellResult.NO_TARGET;
}
return startActions();
}
}
|
Add optional "fail" action handler to Check actions
|
Add optional "fail" action handler to Check actions
|
Java
|
mit
|
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
|
java
|
## Code Before:
package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.ActionHandler;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
public abstract class CheckAction extends CompoundAction {
protected abstract boolean isAllowed(CastContext context);
@Override
public SpellResult step(CastContext context) {
boolean allowed = isAllowed(context);
ActionHandler actions = getHandler("actions");
if (actions == null || actions.size() == 0) {
return allowed ? SpellResult.CAST : SpellResult.STOP;
}
if (!allowed) {
return SpellResult.NO_TARGET;
}
return startActions();
}
}
## Instruction:
Add optional "fail" action handler to Check actions
## Code After:
package com.elmakers.mine.bukkit.action;
import org.bukkit.configuration.ConfigurationSection;
import com.elmakers.mine.bukkit.api.action.ActionHandler;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
public abstract class CheckAction extends CompoundAction {
protected abstract boolean isAllowed(CastContext context);
@Override
protected void addHandlers(Spell spell, ConfigurationSection parameters) {
addHandler(spell, "actions");
addHandler(spell, "fail");
}
@Override
public SpellResult step(CastContext context) {
boolean allowed = isAllowed(context);
if (!allowed) {
ActionHandler fail = getHandler("fail");
if (fail != null && fail.size() != 0) {
return startActions("fail");
}
}
ActionHandler actions = getHandler("actions");
if (actions == null || actions.size() == 0) {
return allowed ? SpellResult.CAST : SpellResult.STOP;
}
if (!allowed) {
return SpellResult.NO_TARGET;
}
return startActions();
}
}
|
// ... existing code ...
package com.elmakers.mine.bukkit.action;
import org.bukkit.configuration.ConfigurationSection;
import com.elmakers.mine.bukkit.api.action.ActionHandler;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
public abstract class CheckAction extends CompoundAction {
// ... modified code ...
protected abstract boolean isAllowed(CastContext context);
@Override
protected void addHandlers(Spell spell, ConfigurationSection parameters) {
addHandler(spell, "actions");
addHandler(spell, "fail");
}
@Override
public SpellResult step(CastContext context) {
boolean allowed = isAllowed(context);
if (!allowed) {
ActionHandler fail = getHandler("fail");
if (fail != null && fail.size() != 0) {
return startActions("fail");
}
}
ActionHandler actions = getHandler("actions");
if (actions == null || actions.size() == 0) {
return allowed ? SpellResult.CAST : SpellResult.STOP;
// ... rest of the code ...
|
7b9661e1d1d78c7558123e4865a32c60e84d8049
|
src/graphics.h
|
src/graphics.h
|
typedef struct {
int num_goats_to_put;
int num_eaten_goats;
player_turn_t turn;
mvt_t input;
mvt_t *possible_mvts;
board_t *board;
} state_to_draw_t;
#endif
|
typedef struct {
int num_goats_to_put;
int num_eaten_goats;
player_turn_t turn;
mvt_t input;
mvt_t *possible_mvts;
size_t num_possible_mvts;
board_t *board;
} state_to_draw_t;
#endif
|
Fix state_to_draw_t by adding the number of possible mvts
|
Fix state_to_draw_t by adding the number of possible mvts
|
C
|
mit
|
moverest/bagh-chal,moverest/bagh-chal,moverest/bagh-chal
|
c
|
## Code Before:
typedef struct {
int num_goats_to_put;
int num_eaten_goats;
player_turn_t turn;
mvt_t input;
mvt_t *possible_mvts;
board_t *board;
} state_to_draw_t;
#endif
## Instruction:
Fix state_to_draw_t by adding the number of possible mvts
## Code After:
typedef struct {
int num_goats_to_put;
int num_eaten_goats;
player_turn_t turn;
mvt_t input;
mvt_t *possible_mvts;
size_t num_possible_mvts;
board_t *board;
} state_to_draw_t;
#endif
|
...
player_turn_t turn;
mvt_t input;
mvt_t *possible_mvts;
size_t num_possible_mvts;
board_t *board;
} state_to_draw_t;
...
|
23fc427f084b7b63322dc28e78eeb678bee779c0
|
squidb/src/com/yahoo/squidb/data/SqlValidatorFactory.java
|
squidb/src/com/yahoo/squidb/data/SqlValidatorFactory.java
|
package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import com.yahoo.squidb.data.SqlValidatorFactory.SqlValidator;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
}
/*package*/ class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
/*package*/ class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
|
package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
private static class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
private static class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
}
|
Make sql validators private static
|
Make sql validators private static
|
Java
|
apache-2.0
|
KrzysztofWrobel/squidb,KrzysztofWrobel/squidb,KrzysztofWrobel/squidb
|
java
|
## Code Before:
package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import com.yahoo.squidb.data.SqlValidatorFactory.SqlValidator;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
}
/*package*/ class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
/*package*/ class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
## Instruction:
Make sql validators private static
## Code After:
package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
private static class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
private static class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
}
|
# ... existing code ...
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
/*package*/ class SqlValidatorFactory {
# ... modified code ...
static SqlValidator getValidator() {
return INSTANCE;
}
private static class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
private static class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
}
# ... rest of the code ...
|
20c52fdaf8f0eaefd9d857b37d89e7b429cc3013
|
tests/functional/test_requests.py
|
tests/functional/test_requests.py
|
from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
|
def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
|
Update requests test for the new funcargs
|
Update requests test for the new funcargs
|
Python
|
mit
|
natefoo/pip,mujiansu/pip,harrisonfeng/pip,xavfernandez/pip,James-Firth/pip,nthall/pip,mujiansu/pip,graingert/pip,prasaianooz/pip,techtonik/pip,zorosteven/pip,rbtcollins/pip,ianw/pip,prasaianooz/pip,zorosteven/pip,ianw/pip,cjerdonek/pip,tdsmith/pip,habnabit/pip,ChristopherHogan/pip,yati-sagade/pip,alex/pip,zvezdan/pip,fiber-space/pip,alquerci/pip,willingc/pip,atdaemon/pip,pradyunsg/pip,xavfernandez/pip,pjdelport/pip,erikrose/pip,jamezpolley/pip,RonnyPfannschmidt/pip,rbtcollins/pip,supriyantomaftuh/pip,James-Firth/pip,pfmoore/pip,squidsoup/pip,davidovich/pip,habnabit/pip,erikrose/pip,msabramo/pip,qwcode/pip,patricklaw/pip,haridsv/pip,wkeyword/pip,jmagnusson/pip,ncoghlan/pip,xavfernandez/pip,zvezdan/pip,jythontools/pip,zenlambda/pip,msabramo/pip,mattrobenolt/pip,dstufft/pip,jasonkying/pip,mindw/pip,Carreau/pip,qbdsoft/pip,Carreau/pip,habnabit/pip,alex/pip,jmagnusson/pip,pypa/pip,dstufft/pip,jmagnusson/pip,haridsv/pip,blarghmatey/pip,caosmo/pip,qbdsoft/pip,mindw/pip,pjdelport/pip,jasonkying/pip,ChristopherHogan/pip,cjerdonek/pip,mujiansu/pip,tdsmith/pip,h4ck3rm1k3/pip,rouge8/pip,Ivoz/pip,KarelJakubec/pip,blarghmatey/pip,Gabriel439/pip,chaoallsome/pip,wkeyword/pip,luzfcb/pip,atdaemon/pip,wkeyword/pip,willingc/pip,sigmavirus24/pip,RonnyPfannschmidt/pip,minrk/pip,atdaemon/pip,James-Firth/pip,benesch/pip,jamezpolley/pip,rouge8/pip,pjdelport/pip,ncoghlan/pip,fiber-space/pip,ChristopherHogan/pip,minrk/pip,natefoo/pip,qbdsoft/pip,sigmavirus24/pip,willingc/pip,dstufft/pip,haridsv/pip,yati-sagade/pip,alquerci/pip,davidovich/pip,blarghmatey/pip,squidsoup/pip,esc/pip,graingert/pip,zvezdan/pip,benesch/pip,esc/pip,supriyantomaftuh/pip,KarelJakubec/pip,sbidoul/pip,nthall/pip,pfmoore/pip,luzfcb/pip,Gabriel439/pip,jamezpolley/pip,jythontools/pip,chaoallsome/pip,fiber-space/pip,caosmo/pip,h4ck3rm1k3/pip,zenlambda/pip,sbidoul/pip,alex/pip,patricklaw/pip,techtonik/pip,jasonkying/pip,tdsmith/pip,jythontools/pip,squidsoup/pip,graingert/pip,yati-sagade/pip,rbtcollins/pip,esc/pip,mattrobenolt/pip,nthall/pip,zorosteven/pip,pypa/pip,Ivoz/pip,supriyantomaftuh/pip,KarelJakubec/pip,harrisonfeng/pip,techtonik/pip,ncoghlan/pip,prasaianooz/pip,chaoallsome/pip,zenlambda/pip,pradyunsg/pip,qwcode/pip,Gabriel439/pip,harrisonfeng/pip,benesch/pip,luzfcb/pip,sigmavirus24/pip,RonnyPfannschmidt/pip,mindw/pip,h4ck3rm1k3/pip,davidovich/pip,erikrose/pip,caosmo/pip,natefoo/pip,rouge8/pip
|
python
|
## Code Before:
from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
## Instruction:
Update requests test for the new funcargs
## Code After:
def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
|
...
def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
...
|
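The record above moves the test onto a fixture passed in as the script argument. A minimal, self-contained sketch of that funcarg/fixture style; FakeScript and its pip method are invented here and are not pip's real test helper:

import pytest

class FakeScript:
    def pip(self, *args, expect_error=False):
        # Pretend to run pip and hand back something the test can inspect.
        return " ".join(args)

@pytest.fixture
def script():
    return FakeScript()

def test_uses_fixture(script):
    result = script.pip("--timeout", "0.01", "install", "INITools", expect_error=True)
    assert "INITools" in result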
fd1783df3648cdb80b32ae41ffd1d9e1ccb23196
|
tests/ex25_tests.py
|
tests/ex25_tests.py
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
|
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
Fix a copy paste fail
|
Fix a copy paste fail
|
Python
|
mit
|
gravyboat/python-exercises
|
python
|
## Code Before:
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(third_person_form, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(third_person_form, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(third_person_form, 'grabs')
## Instruction:
Fix a copy paste fail
## Code After:
from nose.tools import *
from exercises import ex25
def test_make_ing_form_ie():
'''
Test for ie match
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
'''
Test for e match
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
'''
Test for other matches
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
|
// ... existing code ...
'''
present_verb = ex25.make_ing_form('tie')
assert_equal(present_verb, 'tying')
def test_make_ing_form_e():
// ... modified code ...
'''
present_verb = ex25.make_ing_form('grate')
assert_equal(present_verb, 'grating')
def test_make_ing_form_s():
...
'''
present_verb = ex25.make_ing_form('grab')
assert_equal(present_verb, 'grabs')
// ... rest of the code ...
|
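The record above fixes assertions that still referenced a variable name left over from the copied test. A tiny sketch of that failure mode; demo is a made-up function:

def demo():
    present_verb = "tying"
    try:
        assert third_person_form == "tying"  # stale name from the copied test
    except NameError as exc:
        return "NameError: " + str(exc)
    return "assertion ran"

print(demo())  # NameError: name 'third_person_form' is not defined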
4bb566b0b02f1b1d4b7053508fc890cb479ded5b
|
src/main/java/com/pinterest/secor/io/impl/AdgearDeliveryJsonReader.java
|
src/main/java/com/pinterest/secor/io/impl/AdgearDeliveryJsonReader.java
|
package com.pinterest.secor.io.impl;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.io.AdgearReader;
import com.pinterest.secor.io.KeyValue;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
// Converts delivery JSON to Beh TSV
public class AdgearDeliveryJsonReader implements AdgearReader {
private final String timestampFieldname;
public AdgearDeliveryJsonReader(SecorConfig secorConfig) {
timestampFieldname = secorConfig.getMessageTimestampName();
}
public String convert(KeyValue kv) {
JSONObject jsonObject = null;
Object value = JSONValue.parse(kv.getValue());
if (value instanceof JSONObject) {
jsonObject = (JSONObject) value;
} else {
return null;
}
Double timestamp = (Double) jsonObject.get(timestampFieldname);
String cookieId = (String) jsonObject.get("uid");
Integer segmentId = (Integer) jsonObject.get("segment_id");
Boolean segmentIsNew = (Boolean) jsonObject.get("segment_new");
if (timestamp == null || cookieId == null || segmentId == null
|| segmentIsNew == null || !segmentIsNew) {
return null;
}
return String.format("%s\t%d\tseg:%d\n",
cookieId, Math.round(timestamp), segmentId);
}
}
|
package com.pinterest.secor.io.impl;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.io.AdgearReader;
import com.pinterest.secor.io.KeyValue;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
// Converts delivery JSON to Beh TSV
public class AdgearDeliveryJsonReader implements AdgearReader {
private final String timestampFieldname;
public AdgearDeliveryJsonReader(SecorConfig secorConfig) {
timestampFieldname = secorConfig.getMessageTimestampName();
}
public String convert(KeyValue kv) {
JSONObject jsonObject = null;
Object value = JSONValue.parse(kv.getValue());
if (value instanceof JSONObject) {
jsonObject = (JSONObject) value;
} else {
return null;
}
Double timestamp = (Double) jsonObject.get(timestampFieldname);
String cookieId = (String) jsonObject.get("uid");
Integer buyerId = (Integer) jsonObject.get("buyer_id");
Integer segmentId = (Integer) jsonObject.get("segment_id");
Boolean segmentIsNew = (Boolean) jsonObject.get("segment_new");
if (timestamp == null || buyerId == null || cookieId == null || segmentId == null
|| segmentIsNew == null || !segmentIsNew) {
return null;
}
return String.format("%s\t%d\t%d:seg:%d\n",
cookieId, Math.round(timestamp), buyerId, segmentId);
}
}
|
Write buyer ID when converting delivery logs.
|
Write buyer ID when converting delivery logs.
|
Java
|
apache-2.0
|
adgear/secor,adgear/secor
|
java
|
## Code Before:
package com.pinterest.secor.io.impl;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.io.AdgearReader;
import com.pinterest.secor.io.KeyValue;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
// Converts delivery JSON to Beh TSV
public class AdgearDeliveryJsonReader implements AdgearReader {
private final String timestampFieldname;
public AdgearDeliveryJsonReader(SecorConfig secorConfig) {
timestampFieldname = secorConfig.getMessageTimestampName();
}
public String convert(KeyValue kv) {
JSONObject jsonObject = null;
Object value = JSONValue.parse(kv.getValue());
if (value instanceof JSONObject) {
jsonObject = (JSONObject) value;
} else {
return null;
}
Double timestamp = (Double) jsonObject.get(timestampFieldname);
String cookieId = (String) jsonObject.get("uid");
Integer segmentId = (Integer) jsonObject.get("segment_id");
Boolean segmentIsNew = (Boolean) jsonObject.get("segment_new");
if (timestamp == null || cookieId == null || segmentId == null
|| segmentIsNew == null || !segmentIsNew) {
return null;
}
return String.format("%s\t%d\tseg:%d\n",
cookieId, Math.round(timestamp), segmentId);
}
}
## Instruction:
Write buyer ID when converting delivery logs.
## Code After:
package com.pinterest.secor.io.impl;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.io.AdgearReader;
import com.pinterest.secor.io.KeyValue;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
// Converts delivery JSON to Beh TSV
public class AdgearDeliveryJsonReader implements AdgearReader {
private final String timestampFieldname;
public AdgearDeliveryJsonReader(SecorConfig secorConfig) {
timestampFieldname = secorConfig.getMessageTimestampName();
}
public String convert(KeyValue kv) {
JSONObject jsonObject = null;
Object value = JSONValue.parse(kv.getValue());
if (value instanceof JSONObject) {
jsonObject = (JSONObject) value;
} else {
return null;
}
Double timestamp = (Double) jsonObject.get(timestampFieldname);
String cookieId = (String) jsonObject.get("uid");
Integer buyerId = (Integer) jsonObject.get("buyer_id");
Integer segmentId = (Integer) jsonObject.get("segment_id");
Boolean segmentIsNew = (Boolean) jsonObject.get("segment_new");
if (timestamp == null || buyerId == null || cookieId == null || segmentId == null
|| segmentIsNew == null || !segmentIsNew) {
return null;
}
return String.format("%s\t%d\t%d:seg:%d\n",
cookieId, Math.round(timestamp), buyerId, segmentId);
}
}
|
// ... existing code ...
Double timestamp = (Double) jsonObject.get(timestampFieldname);
String cookieId = (String) jsonObject.get("uid");
Integer buyerId = (Integer) jsonObject.get("buyer_id");
Integer segmentId = (Integer) jsonObject.get("segment_id");
Boolean segmentIsNew = (Boolean) jsonObject.get("segment_new");
if (timestamp == null || buyerId == null || cookieId == null || segmentId == null
|| segmentIsNew == null || !segmentIsNew) {
return null;
}
return String.format("%s\t%d\t%d:seg:%d\n",
cookieId, Math.round(timestamp), buyerId, segmentId);
}
}
// ... rest of the code ...
|
64042be2b6febf64d601adaa6f85a542ae9b876d
|
sunpy/instr/iris/iris.py
|
sunpy/instr/iris/iris.py
|
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
|
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
|
Change hdu[0] to hdu for optional indexing
|
Change hdu[0] to hdu for optional indexing
|
Python
|
bsd-2-clause
|
Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy
|
python
|
## Code Before:
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
## Instruction:
Change hdu[0] to hdu for optional indexing
## Code After:
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
|
// ... existing code ...
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
// ... modified code ...
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
...
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
// ... rest of the code ...
|
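A short usage sketch of the changed call follows; the FITS file name and the choice of hdu=1 are placeholders, and the import path is only inferred from the module's location under sunpy/instr/iris.

from sunpy.instr.iris.iris import SJI_to_cube

# Build a MapCube from the second HDU, keeping frames 0 through 9
cube = SJI_to_cube('iris_sji_example.fits', start=0, stop=10, hdu=1)
for m in cube:
    print(m.meta['DATE-OBS'])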
26b91d30dff8c4034809dc5783d9ff5f44b18ede
|
src/main/java/de/retest/recheck/ui/descriptors/TextAttribute.java
|
src/main/java/de/retest/recheck/ui/descriptors/TextAttribute.java
|
package de.retest.recheck.ui.descriptors;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlRootElement;
import de.retest.recheck.util.StringSimilarity;
@XmlRootElement
public class TextAttribute extends StringAttribute {
private static final long serialVersionUID = 1L;
// Used by JaxB
protected TextAttribute() {}
public TextAttribute( final String key, final String value ) {
this( key, value, null );
}
public TextAttribute( final String key, final String value, final String variableName ) {
super( key, value != null ? value.trim() : null, variableName );
}
@Override
public double match( final Attribute other ) {
if ( !(other instanceof TextAttribute) ) {
return NO_MATCH;
}
assert other.getKey().equals( getKey() );
return StringSimilarity.textSimilarity( getValue(), ((StringAttribute) other).getValue() );
}
@Override
public Attribute applyChanges( final Serializable actual ) {
return new TextAttribute( getKey(), (String) actual, getVariableName() );
}
@Override
public ParameterizedAttribute applyVariableChange( final String variableName ) {
return new TextAttribute( getKey(), getValue(), variableName );
}
}
|
package de.retest.recheck.ui.descriptors;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlRootElement;
import de.retest.recheck.util.StringSimilarity;
@XmlRootElement
public class TextAttribute extends StringAttribute {
private static final long serialVersionUID = 1L;
// Used by JaxB
protected TextAttribute() {}
public TextAttribute( final String key, final String value ) {
this( key, value, null );
}
public TextAttribute( final String key, final String value, final String variableName ) {
super( key, value, variableName );
}
@Override
public double match( final Attribute other ) {
if ( !(other instanceof TextAttribute) ) {
return NO_MATCH;
}
assert other.getKey().equals( getKey() );
return StringSimilarity.textSimilarity( getValue(), ((StringAttribute) other).getValue() );
}
@Override
public Attribute applyChanges( final Serializable actual ) {
return new TextAttribute( getKey(), (String) actual, getVariableName() );
}
@Override
public ParameterizedAttribute applyVariableChange( final String variableName ) {
return new TextAttribute( getKey(), getValue(), variableName );
}
}
|
Fix value should be given formatted
|
Fix value should be given formatted
|
Java
|
agpl-3.0
|
retest/recheck,retest/recheck
|
java
|
## Code Before:
package de.retest.recheck.ui.descriptors;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlRootElement;
import de.retest.recheck.util.StringSimilarity;
@XmlRootElement
public class TextAttribute extends StringAttribute {
private static final long serialVersionUID = 1L;
// Used by JaxB
protected TextAttribute() {}
public TextAttribute( final String key, final String value ) {
this( key, value, null );
}
public TextAttribute( final String key, final String value, final String variableName ) {
super( key, value != null ? value.trim() : null, variableName );
}
@Override
public double match( final Attribute other ) {
if ( !(other instanceof TextAttribute) ) {
return NO_MATCH;
}
assert other.getKey().equals( getKey() );
return StringSimilarity.textSimilarity( getValue(), ((StringAttribute) other).getValue() );
}
@Override
public Attribute applyChanges( final Serializable actual ) {
return new TextAttribute( getKey(), (String) actual, getVariableName() );
}
@Override
public ParameterizedAttribute applyVariableChange( final String variableName ) {
return new TextAttribute( getKey(), getValue(), variableName );
}
}
## Instruction:
Fix value should be given formatted
## Code After:
package de.retest.recheck.ui.descriptors;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlRootElement;
import de.retest.recheck.util.StringSimilarity;
@XmlRootElement
public class TextAttribute extends StringAttribute {
private static final long serialVersionUID = 1L;
// Used by JaxB
protected TextAttribute() {}
public TextAttribute( final String key, final String value ) {
this( key, value, null );
}
public TextAttribute( final String key, final String value, final String variableName ) {
super( key, value, variableName );
}
@Override
public double match( final Attribute other ) {
if ( !(other instanceof TextAttribute) ) {
return NO_MATCH;
}
assert other.getKey().equals( getKey() );
return StringSimilarity.textSimilarity( getValue(), ((StringAttribute) other).getValue() );
}
@Override
public Attribute applyChanges( final Serializable actual ) {
return new TextAttribute( getKey(), (String) actual, getVariableName() );
}
@Override
public ParameterizedAttribute applyVariableChange( final String variableName ) {
return new TextAttribute( getKey(), getValue(), variableName );
}
}
|
# ... existing code ...
}
public TextAttribute( final String key, final String value, final String variableName ) {
super( key, value, variableName );
}
@Override
# ... rest of the code ...
|
e52bc2326bc9248470772ef8dc72a2ba5e3c3578
|
app/src/main/java/com/elpatika/stepic/view/SplashActivity.java
|
app/src/main/java/com/elpatika/stepic/view/SplashActivity.java
|
package com.elpatika.stepic.view;
import android.os.Bundle;
import android.os.Handler;
import com.elpatika.stepic.R;
import com.elpatika.stepic.base.BaseFragmentActivity;
public class SplashActivity extends BaseFragmentActivity {
// Splash screen wait time
private static final int SPLASH_TIME_OUT = 3000;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//This stops from opening again from the Splash screen when minimized
if (!isTaskRoot()) {
finish();
return;
}
setContentView(R.layout.activity_splash);
overridePendingTransition(R.anim.slide_in_from_end, R.anim.slide_out_to_start);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mShell.getScreenProvider().showLaunchScreen(SplashActivity.this, false);
finish();
}
}, SPLASH_TIME_OUT);
}
}
|
package com.elpatika.stepic.view;
import android.os.Bundle;
import android.os.Handler;
import com.elpatika.stepic.R;
import com.elpatika.stepic.base.BaseFragmentActivity;
public class SplashActivity extends BaseFragmentActivity {
// Splash screen wait time
private static final int SPLASH_TIME_OUT = 3000;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//This stops from opening again from the Splash screen when minimized
if (!isTaskRoot()) {
finish();
return;
}
setContentView(R.layout.activity_splash);
overridePendingTransition(R.anim.slide_in_from_end, R.anim.slide_out_to_start);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (!isFinishing()) {
mShell.getScreenProvider().showLaunchScreen(SplashActivity.this, false);
finish();
}
}
}, SPLASH_TIME_OUT);
}
}
|
Fix bug with second start splash activity
|
Fix bug with second start splash activity
|
Java
|
apache-2.0
|
StepicOrg/stepik-android,StepicOrg/stepic-android,StepicOrg/stepik-android,StepicOrg/stepic-android,StepicOrg/stepik-android,StepicOrg/stepic-android
|
java
|
## Code Before:
package com.elpatika.stepic.view;
import android.os.Bundle;
import android.os.Handler;
import com.elpatika.stepic.R;
import com.elpatika.stepic.base.BaseFragmentActivity;
public class SplashActivity extends BaseFragmentActivity {
// Splash screen wait time
private static final int SPLASH_TIME_OUT = 3000;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//This stops from opening again from the Splash screen when minimized
if (!isTaskRoot()) {
finish();
return;
}
setContentView(R.layout.activity_splash);
overridePendingTransition(R.anim.slide_in_from_end, R.anim.slide_out_to_start);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mShell.getScreenProvider().showLaunchScreen(SplashActivity.this, false);
finish();
}
}, SPLASH_TIME_OUT);
}
}
## Instruction:
Fix bug with second start splash activity
## Code After:
package com.elpatika.stepic.view;
import android.os.Bundle;
import android.os.Handler;
import com.elpatika.stepic.R;
import com.elpatika.stepic.base.BaseFragmentActivity;
public class SplashActivity extends BaseFragmentActivity {
// Splash screen wait time
private static final int SPLASH_TIME_OUT = 3000;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//This stops from opening again from the Splash screen when minimized
if (!isTaskRoot()) {
finish();
return;
}
setContentView(R.layout.activity_splash);
overridePendingTransition(R.anim.slide_in_from_end, R.anim.slide_out_to_start);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (!isFinishing()) {
mShell.getScreenProvider().showLaunchScreen(SplashActivity.this, false);
finish();
}
}
}, SPLASH_TIME_OUT);
}
}
|
# ... existing code ...
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (!isFinishing()) {
mShell.getScreenProvider().showLaunchScreen(SplashActivity.this, false);
finish();
}
}
}, SPLASH_TIME_OUT);
# ... rest of the code ...
|
9ec8d2b01e0f8aefc9d4c2c82c22af6f8c48a75b
|
usingnamespace/api/interfaces.py
|
usingnamespace/api/interfaces.py
|
from zope.interface import Interface
class ISerializer(Interface):
"""Marker Interface"""
|
from zope.interface import Interface
class ISerializer(Interface):
"""Marker Interface"""
class IDigestMethod(Interface):
"""Marker Interface"""
|
Add new marker interface for a digest method
|
Add new marker interface for a digest method
|
Python
|
isc
|
usingnamespace/usingnamespace
|
python
|
## Code Before:
from zope.interface import Interface
class ISerializer(Interface):
"""Marker Interface"""
## Instruction:
Add new marker interface for a digest method
## Code After:
from zope.interface import Interface
class ISerializer(Interface):
"""Marker Interface"""
class IDigestMethod(Interface):
"""Marker Interface"""
|
...
class ISerializer(Interface):
"""Marker Interface"""
class IDigestMethod(Interface):
"""Marker Interface"""
...
|
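For illustration, a minimal consumer of the new marker interface; the digest class and its name are invented, only IDigestMethod and its import path come from this entry.

from zope.interface import implementer
from usingnamespace.api.interfaces import IDigestMethod

@implementer(IDigestMethod)
class Sha256Digest(object):
    """Hypothetical digest method advertised via the marker interface."""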
315bc9ada4452517df245b74b55a96c05209ef8a
|
setup.py
|
setup.py
|
from setuptools import Extension, setup
from Cython.Build import cythonize
import numpy
sourcefiles = [ 'SuchTree/SuchTree.pyx' ]
extensions = [ Extension( 'SuchTree', sourcefiles ) ]
extensions = cythonize( extensions, language_level = "3",
include_path = [ numpy.get_include() ] )
setup(
ext_modules = cythonize( 'SuchTree/SuchTree.pyx' )
)
|
from setuptools import Extension, setup
from Cython.Build import cythonize
import numpy
sourcefiles = [ 'SuchTree/SuchTree.pyx' ]
extensions = [ Extension( 'SuchTree', sourcefiles, include_dirs=[numpy.get_include()]) ]
extensions = cythonize( extensions, language_level = "3" )
setup(
ext_modules = extensions
)
|
Rework Cython to avoid duplicated build, include numpy headers
|
Rework Cython to avoid duplicated build, include numpy headers
|
Python
|
bsd-3-clause
|
ryneches/SuchTree
|
python
|
## Code Before:
from setuptools import Extension, setup
from Cython.Build import cythonize
import numpy
sourcefiles = [ 'SuchTree/SuchTree.pyx' ]
extensions = [ Extension( 'SuchTree', sourcefiles ) ]
extensions = cythonize( extensions, language_level = "3",
include_path = [ numpy.get_include() ] )
setup(
ext_modules = cythonize( 'SuchTree/SuchTree.pyx' )
)
## Instruction:
Rework Cython to avoid duplicated build, include numpy headers
## Code After:
from setuptools import Extension, setup
from Cython.Build import cythonize
import numpy
sourcefiles = [ 'SuchTree/SuchTree.pyx' ]
extensions = [ Extension( 'SuchTree', sourcefiles, include_dirs=[numpy.get_include()]) ]
extensions = cythonize( extensions, language_level = "3" )
setup(
ext_modules = extensions
)
|
# ... existing code ...
sourcefiles = [ 'SuchTree/SuchTree.pyx' ]
extensions = [ Extension( 'SuchTree', sourcefiles, include_dirs=[numpy.get_include()]) ]
extensions = cythonize( extensions, language_level = "3" )
setup(
ext_modules = extensions
)
# ... rest of the code ...
|
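One quick way to see the intent of the change: the numpy header path now travels with the Extension object into the C compile step instead of being handed to cythonize. A small sketch, assuming numpy and setuptools are installed:

import numpy
from setuptools import Extension

ext = Extension('SuchTree', ['SuchTree/SuchTree.pyx'],
                include_dirs=[numpy.get_include()])
# The include path is attached to the extension itself
print(ext.include_dirs)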
9a8f27fb6b3cec373d841b0973ee59f2ddd0b875
|
fabfile.py
|
fabfile.py
|
from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
Use sudo() function for db migration call
|
fabric: Use sudo() function for db migration call
|
Python
|
agpl-3.0
|
RBE-Avionik/skylines,shadowoneau/skylines,RBE-Avionik/skylines,Harry-R/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,TobiasLohner/SkyLines,shadowoneau/skylines,RBE-Avionik/skylines,kerel-fs/skylines,kerel-fs/skylines,snip/skylines,skylines-project/skylines,shadowoneau/skylines,Harry-R/skylines,shadowoneau/skylines,snip/skylines,TobiasLohner/SkyLines,Turbo87/skylines,Harry-R/skylines,snip/skylines,skylines-project/skylines,Turbo87/skylines,kerel-fs/skylines,Turbo87/skylines,TobiasLohner/SkyLines,RBE-Avionik/skylines,skylines-project/skylines
|
python
|
## Code Before:
from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
## Instruction:
fabric: Use sudo() function for db migration call
## Code After:
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
# ... existing code ...
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
# ... modified code ...
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
# ... rest of the code ...
|
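The core of the change, isolated as a sketch with the paths and user name taken from this entry: fabric's settings() context manager temporarily sets env.sudo_user, so sudo() runs the migration as the skylines user instead of shelling out to sudo -u by hand.

from fabric.api import cd, settings, sudo

def migrate():
    with cd('/opt/skylines/src'):
        # settings() scopes the env override to this block
        with settings(sudo_user='skylines'):
            sudo('./manage.py migrate upgrade')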
6d50cf83eedcf5f04e2a38ffdc00643722116571
|
geneproduct-common/src/main/java/uk/ac/ebi/quickgo/geneproduct/common/ProteomeMembership.java
|
geneproduct-common/src/main/java/uk/ac/ebi/quickgo/geneproduct/common/ProteomeMembership.java
|
package uk.ac.ebi.quickgo.geneproduct.common;
/**
* An enumeration of the possible states of proteome membership a gene product can have.
* @author Tony Wardell
* Date: 06/03/2018
* Time: 10:33
* Created with IntelliJ IDEA.
*/
public enum ProteomeMembership {
REFERENCE, COMPLETE, NONE, NOT_APPLICABLE
}
|
package uk.ac.ebi.quickgo.geneproduct.common;
/**
* An enumeration of the possible states of proteome membership a gene product can have.
* @author Tony Wardell
* Date: 06/03/2018
* Time: 10:33
* Created with IntelliJ IDEA.
*/
public enum ProteomeMembership {
REFERENCE, COMPLETE, NONE, NOT_APPLICABLE;
/**
* Define the predicates required and order of importance to work out which Proteome membership category is
* applicable.
* @param isProtein is the gene product a protein
* @param isRef is the gene product a reference proteome
* @param isComplete is the gene product a member of a complete proteome.
* @return the ProteomeMembership matching the applied constraints
*/
public static ProteomeMembership membership(boolean isProtein, boolean isRef, boolean isComplete) {
if (!isProtein) {
return NOT_APPLICABLE;
} else if (isRef) {
return REFERENCE;
} else if (isComplete) {
return COMPLETE;
}
return NONE;
}
}
|
Add membership() method for selection logic.
|
Add membership() method for selection logic.
|
Java
|
apache-2.0
|
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
|
java
|
## Code Before:
package uk.ac.ebi.quickgo.geneproduct.common;
/**
* An enumeration of the possible states of proteome membership a gene product can have.
* @author Tony Wardell
* Date: 06/03/2018
* Time: 10:33
* Created with IntelliJ IDEA.
*/
public enum ProteomeMembership {
REFERENCE, COMPLETE, NONE, NOT_APPLICABLE
}
## Instruction:
Add membership() method for selection logic.
## Code After:
package uk.ac.ebi.quickgo.geneproduct.common;
/**
* An enumeration of the possible states of proteome membership a gene product can have.
* @author Tony Wardell
* Date: 06/03/2018
* Time: 10:33
* Created with IntelliJ IDEA.
*/
public enum ProteomeMembership {
REFERENCE, COMPLETE, NONE, NOT_APPLICABLE;
/**
* Define the predicates required and order of importance to work out which Proteome membership category is
* applicable.
* @param isProtein is the gene product a protein
* @param isRef is the gene product a reference proteome
* @param isComplete is the gene product a member of a complete proteome.
* @return the ProteomeMembership matching the applied constraints
*/
public static ProteomeMembership membership(boolean isProtein, boolean isRef, boolean isComplete) {
if (!isProtein) {
return NOT_APPLICABLE;
} else if (isRef) {
return REFERENCE;
} else if (isComplete) {
return COMPLETE;
}
return NONE;
}
}
|
// ... existing code ...
* Created with IntelliJ IDEA.
*/
public enum ProteomeMembership {
REFERENCE, COMPLETE, NONE, NOT_APPLICABLE;
/**
* Define the predicates required and order of importance to work out which Proteome membership category is
* applicable.
* @param isProtein is the gene product a protein
* @param isRef is the gene product a reference proteome
* @param isComplete is the gene product a member of a complete proteome.
* @return the ProteomeMembership matching the applied constraints
*/
public static ProteomeMembership membership(boolean isProtein, boolean isRef, boolean isComplete) {
if (!isProtein) {
return NOT_APPLICABLE;
} else if (isRef) {
return REFERENCE;
} else if (isComplete) {
return COMPLETE;
}
return NONE;
}
}
// ... rest of the code ...
|
0a39c49f370780025da6bf828bf66c1c1ab25131
|
TASKS/TASK_1/sources/vector3d.h
|
TASKS/TASK_1/sources/vector3d.h
|
namespace classes {
struct Point3D {
double x, y, z;
};
typedef const char* err;
class Vector3D {
struct Cheshire;
Cheshire* smile;
static const Point3D nullPoint;
static unsigned int idCounter;
public:
Vector3D(Point3D point = nullPoint);
Vector3D(double x, double y = 0.0, double z = 0.0);
~Vector3D();
double getModule() const;
void print() const;
const Point3D& getPoint() const;
Vector3D copy() const;
void multiplyByScalar(const double);
void normalize();
static Vector3D add(Vector3D&, Vector3D&);
static Vector3D substract(Vector3D&, Vector3D&);
static Vector3D vectorMultiply(Vector3D&, Vector3D&) ;
static double scalarMultiply(Vector3D&, Vector3D&);
static double sin(Vector3D&, Vector3D&);
static double cos(Vector3D&, Vector3D&);
static double angle(Vector3D&, Vector3D&);
};
}
#endif // VECTOR_3D_H
|
namespace classes {
struct Point3D {
double x, y, z;
};
typedef const char* err;
class Vector3D {
struct Cheshire;
Cheshire* smile;
static const Point3D nullPoint;
static unsigned int idCounter;
public:
Vector3D(Point3D point = nullPoint);
Vector3D(double x, double y = 0.0, double z = 0.0);
~Vector3D();
double getModule() const;
void print() const;
const Point3D& getPoint() const;
Vector3D copy() const;
void multiplyByScalar(const double);
void normalize();
};
namespace VectorsManip {
Vector3D add(Vector3D&, Vector3D&);
Vector3D substract(Vector3D&, Vector3D&);
Vector3D vectorMultiply(Vector3D&, Vector3D&) ;
double scalarMultiply(Vector3D&, Vector3D&);
double sin(Vector3D&, Vector3D&);
double cos(Vector3D&, Vector3D&);
double angle(Vector3D&, Vector3D&);
}
}
#endif // VECTOR_3D_H
|
Move static methods outside of class
|
Move static methods outside of class
|
C
|
mit
|
reeFridge/learn-oop-through-cpp
|
c
|
## Code Before:
namespace classes {
struct Point3D {
double x, y, z;
};
typedef const char* err;
class Vector3D {
struct Cheshire;
Cheshire* smile;
static const Point3D nullPoint;
static unsigned int idCounter;
public:
Vector3D(Point3D point = nullPoint);
Vector3D(double x, double y = 0.0, double z = 0.0);
~Vector3D();
double getModule() const;
void print() const;
const Point3D& getPoint() const;
Vector3D copy() const;
void multiplyByScalar(const double);
void normalize();
static Vector3D add(Vector3D&, Vector3D&);
static Vector3D substract(Vector3D&, Vector3D&);
static Vector3D vectorMultiply(Vector3D&, Vector3D&) ;
static double scalarMultiply(Vector3D&, Vector3D&);
static double sin(Vector3D&, Vector3D&);
static double cos(Vector3D&, Vector3D&);
static double angle(Vector3D&, Vector3D&);
};
}
#endif // VECTOR_3D_H
## Instruction:
Move static methods outside of class
## Code After:
namespace classes {
struct Point3D {
double x, y, z;
};
typedef const char* err;
class Vector3D {
struct Cheshire;
Cheshire* smile;
static const Point3D nullPoint;
static unsigned int idCounter;
public:
Vector3D(Point3D point = nullPoint);
Vector3D(double x, double y = 0.0, double z = 0.0);
~Vector3D();
double getModule() const;
void print() const;
const Point3D& getPoint() const;
Vector3D copy() const;
void multiplyByScalar(const double);
void normalize();
};
namespace VectorsManip {
Vector3D add(Vector3D&, Vector3D&);
Vector3D substract(Vector3D&, Vector3D&);
Vector3D vectorMultiply(Vector3D&, Vector3D&) ;
double scalarMultiply(Vector3D&, Vector3D&);
double sin(Vector3D&, Vector3D&);
double cos(Vector3D&, Vector3D&);
double angle(Vector3D&, Vector3D&);
}
}
#endif // VECTOR_3D_H
|
# ... existing code ...
void multiplyByScalar(const double);
void normalize();
};
namespace VectorsManip {
Vector3D add(Vector3D&, Vector3D&);
Vector3D substract(Vector3D&, Vector3D&);
Vector3D vectorMultiply(Vector3D&, Vector3D&) ;
double scalarMultiply(Vector3D&, Vector3D&);
double sin(Vector3D&, Vector3D&);
double cos(Vector3D&, Vector3D&);
double angle(Vector3D&, Vector3D&);
}
}
#endif // VECTOR_3D_H
# ... rest of the code ...
|
f622e11536c4ebf8f82985329d06efc58c2fe60e
|
blog/tests/test_views.py
|
blog/tests/test_views.py
|
from django.test import TestCase
class BlogViewsTestCase(TestCase):
def setUp(self):
|
from django import test
from django.core.urlresolvers import reverse
from blog.models import Post, Category
class BlogViewsTestCase(test.TestCase):
def setUp(self):
# Add parent category and post category
parent = Category(name='Writing', parent=None)
parent.save()
category = Category(name='Thoughts', parent=parent)
category.save()
# Create a draft
_post = Post(title='Random thoughts of the author',
body='Thoughts turned to words.', category=category)
_post.save()
self.draft = _post
# Publish a post
post = Post(title='New thoughts from without',
body='A post fit to be published!', category=category)
post.save()
post.publish()
self.post = post
self.client = test.Client()
def test_index(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
posts = response.context['posts']
self.assertNotIn(self.draft, posts)
self.assertIn(self.post, posts)
def test_post_view(self):
post_url = reverse('blog:post', kwargs=dict(pid=self.post.id,
slug=self.post.slug))
response = self.client.get(post_url)
self.assertEqual(response.status_code, 200)
post = response.context['post']
posts = response.context['posts']
self.assertEqual(post, self.post)
self.assertEqual(posts.count(), 0)
def test_draft_view(self):
draft_url = reverse('blog:post', kwargs=dict(pid=self.draft.id,
slug=self.draft.slug))
response = self.client.get(draft_url)
self.assertEqual(response.status_code, 404)
|
Add tests for blog index view and post view
|
Add tests for blog index view and post view
|
Python
|
mit
|
ajoyoommen/weblog,ajoyoommen/weblog
|
python
|
## Code Before:
from django.test import TestCase
class BlogViewsTestCase(TestCase):
def setUp(self):
## Instruction:
Add tests for blog index view and post view
## Code After:
from django import test
from django.core.urlresolvers import reverse
from blog.models import Post, Category
class BlogViewsTestCase(test.TestCase):
def setUp(self):
# Add parent category and post category
parent = Category(name='Writing', parent=None)
parent.save()
category = Category(name='Thoughts', parent=parent)
category.save()
# Create a draft
_post = Post(title='Random thoughts of the author',
body='Thoughts turned to words.', category=category)
_post.save()
self.draft = _post
# Publish a post
post = Post(title='New thoughts from without',
body='A post fit to be published!', category=category)
post.save()
post.publish()
self.post = post
self.client = test.Client()
def test_index(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
posts = response.context['posts']
self.assertNotIn(self.draft, posts)
self.assertIn(self.post, posts)
def test_post_view(self):
post_url = reverse('blog:post', kwargs=dict(pid=self.post.id,
slug=self.post.slug))
response = self.client.get(post_url)
self.assertEqual(response.status_code, 200)
post = response.context['post']
posts = response.context['posts']
self.assertEqual(post, self.post)
self.assertEqual(posts.count(), 0)
def test_draft_view(self):
draft_url = reverse('blog:post', kwargs=dict(pid=self.draft.id,
slug=self.draft.slug))
response = self.client.get(draft_url)
self.assertEqual(response.status_code, 404)
|
// ... existing code ...
from django import test
from django.core.urlresolvers import reverse
from blog.models import Post, Category
class BlogViewsTestCase(test.TestCase):
def setUp(self):
# Add parent category and post category
parent = Category(name='Writing', parent=None)
parent.save()
category = Category(name='Thoughts', parent=parent)
category.save()
# Create a draft
_post = Post(title='Random thoughts of the author',
body='Thoughts turned to words.', category=category)
_post.save()
self.draft = _post
# Publish a post
post = Post(title='New thoughts from without',
body='A post fit to be published!', category=category)
post.save()
post.publish()
self.post = post
self.client = test.Client()
def test_index(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
posts = response.context['posts']
self.assertNotIn(self.draft, posts)
self.assertIn(self.post, posts)
def test_post_view(self):
post_url = reverse('blog:post', kwargs=dict(pid=self.post.id,
slug=self.post.slug))
response = self.client.get(post_url)
self.assertEqual(response.status_code, 200)
post = response.context['post']
posts = response.context['posts']
self.assertEqual(post, self.post)
self.assertEqual(posts.count(), 0)
def test_draft_view(self):
draft_url = reverse('blog:post', kwargs=dict(pid=self.draft.id,
slug=self.draft.slug))
response = self.client.get(draft_url)
self.assertEqual(response.status_code, 404)
// ... rest of the code ...
|
908064ff40a6a5a036d78b203bf96e7a59ee6ed9
|
subprojects/internal-testing/build.gradle.kts
|
subprojects/internal-testing/build.gradle.kts
|
plugins {
id("gradlebuild.internal.java")
}
dependencies {
implementation(project(":base-services"))
implementation(project(":native"))
implementation(libs.groovy)
implementation(libs.slf4jApi)
implementation(libs.guava)
implementation(libs.commonsLang)
implementation(libs.commonsIo)
implementation(libs.ant)
implementation(libs.asm)
implementation(libs.asmTree)
implementation(libs.junit)
implementation(libs.spock)
implementation(libs.spockJUnit4)
implementation(libs.jsoup)
implementation(libs.testcontainersSpock)
runtimeOnly(libs.bytebuddy)
}
|
plugins {
id("gradlebuild.internal.java")
}
dependencies {
implementation(project(":base-services"))
implementation(project(":native"))
implementation(libs.groovy)
implementation(libs.groovyJson)
implementation(libs.groovyXml)
implementation(libs.slf4jApi)
implementation(libs.guava)
implementation(libs.commonsLang)
implementation(libs.commonsIo)
implementation(libs.ant)
implementation(libs.asm)
implementation(libs.asmTree)
implementation(libs.junit)
implementation(libs.spock)
implementation(libs.spockJUnit4)
implementation(libs.jsoup)
implementation(libs.testcontainersSpock)
runtimeOnly(libs.bytebuddy)
}
|
Upgrade 'internal-testing' to Groovy 3
|
Upgrade 'internal-testing' to Groovy 3
|
Kotlin
|
apache-2.0
|
blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle
|
kotlin
|
## Code Before:
plugins {
id("gradlebuild.internal.java")
}
dependencies {
implementation(project(":base-services"))
implementation(project(":native"))
implementation(libs.groovy)
implementation(libs.slf4jApi)
implementation(libs.guava)
implementation(libs.commonsLang)
implementation(libs.commonsIo)
implementation(libs.ant)
implementation(libs.asm)
implementation(libs.asmTree)
implementation(libs.junit)
implementation(libs.spock)
implementation(libs.spockJUnit4)
implementation(libs.jsoup)
implementation(libs.testcontainersSpock)
runtimeOnly(libs.bytebuddy)
}
## Instruction:
Upgrade 'internal-testing' to Groovy 3
## Code After:
plugins {
id("gradlebuild.internal.java")
}
dependencies {
implementation(project(":base-services"))
implementation(project(":native"))
implementation(libs.groovy)
implementation(libs.groovyJson)
implementation(libs.groovyXml)
implementation(libs.slf4jApi)
implementation(libs.guava)
implementation(libs.commonsLang)
implementation(libs.commonsIo)
implementation(libs.ant)
implementation(libs.asm)
implementation(libs.asmTree)
implementation(libs.junit)
implementation(libs.spock)
implementation(libs.spockJUnit4)
implementation(libs.jsoup)
implementation(libs.testcontainersSpock)
runtimeOnly(libs.bytebuddy)
}
|
# ... existing code ...
implementation(project(":native"))
implementation(libs.groovy)
implementation(libs.groovyJson)
implementation(libs.groovyXml)
implementation(libs.slf4jApi)
implementation(libs.guava)
implementation(libs.commonsLang)
# ... rest of the code ...
|
f332badaeb115c6db8d3f6b1cf97af6155df3cc7
|
include/vkalloc.h
|
include/vkalloc.h
|
struct VkAllocation {
VkDeviceMemory deviceMemory;
uint64_t offset;
uint64_t size;
};
void vkaInit(VkPhysicalDevice physicalDevice, VkDevice device);
void vkaTerminate();
VkAllocation vkAlloc(VkMemoryRequirements requirements);
void vkFree(VkAllocation allocation);
VkAllocation vkHostAlloc(VkMemoryRequirements requirements);
void vkHostFree(VkAllocation allocation);
#endif //VKALLOC_VKALLOC_H
|
struct VkAllocation {
VkDeviceMemory deviceMemory = VK_NULL_HANDLE;
uint64_t offset = 0;
uint64_t size = 0;
};
void vkaInit(VkPhysicalDevice physicalDevice, VkDevice device);
void vkaTerminate();
VkAllocation vkAlloc(VkMemoryRequirements requirements);
void vkFree(VkAllocation allocation);
VkAllocation vkHostAlloc(VkMemoryRequirements requirements);
void vkHostFree(VkAllocation allocation);
#endif //VKALLOC_VKALLOC_H
|
Add default values to VkAllocation
|
Add default values to VkAllocation
|
C
|
mit
|
rhynodegreat/VkAlloc,rhynodegreat/VkAlloc
|
c
|
## Code Before:
struct VkAllocation {
VkDeviceMemory deviceMemory;
uint64_t offset;
uint64_t size;
};
void vkaInit(VkPhysicalDevice physicalDevice, VkDevice device);
void vkaTerminate();
VkAllocation vkAlloc(VkMemoryRequirements requirements);
void vkFree(VkAllocation allocation);
VkAllocation vkHostAlloc(VkMemoryRequirements requirements);
void vkHostFree(VkAllocation allocation);
#endif //VKALLOC_VKALLOC_H
## Instruction:
Add default values to VkAllocation
## Code After:
struct VkAllocation {
VkDeviceMemory deviceMemory = VK_NULL_HANDLE;
uint64_t offset = 0;
uint64_t size = 0;
};
void vkaInit(VkPhysicalDevice physicalDevice, VkDevice device);
void vkaTerminate();
VkAllocation vkAlloc(VkMemoryRequirements requirements);
void vkFree(VkAllocation allocation);
VkAllocation vkHostAlloc(VkMemoryRequirements requirements);
void vkHostFree(VkAllocation allocation);
#endif //VKALLOC_VKALLOC_H
|
# ... existing code ...
struct VkAllocation {
VkDeviceMemory deviceMemory = VK_NULL_HANDLE;
uint64_t offset = 0;
uint64_t size = 0;
};
void vkaInit(VkPhysicalDevice physicalDevice, VkDevice device);
# ... rest of the code ...
|
597a2ec7a6ff0bae0b43a67e8be675017fd1d7f1
|
falafel/mappers/tests/test_current_clocksource.py
|
falafel/mappers/tests/test_current_clocksource.py
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
Enhance coverage of current_clocksource to 100%
|
Enhance coverage of current_clocksource to 100%
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
python
|
## Code Before:
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
## Instruction:
Enhance coverage of current_clocksource to 100%
## Code After:
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
// ... existing code ...
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
// ... rest of the code ...
|
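The new assertions lean on is_kvm, is_tsc and is_vmi_timer properties of the mapper; a rough guess at their shape is below, and the real class in falafel.mappers.current_clocksource may well differ.

class CurrentClockSource(object):
    """Hypothetical sketch of the mapper the test exercises."""

    def __init__(self, context):
        # Assumed here: the wrapped context exposes the file lines as .content
        self.data = context.content[0].strip()

    @property
    def is_kvm(self):
        return 'kvm' in self.data

    @property
    def is_tsc(self):
        return 'tsc' in self.data

    @property
    def is_vmi_timer(self):
        return 'vmi-timer' in self.data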
a817afa1580aeb59fcbe837893c9ec8c5e7e0667
|
anygit/clisetup.py
|
anygit/clisetup.py
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
Load the logging config right away so it actually works
|
Load the logging config right away so it actually works
|
Python
|
mit
|
ebroder/anygit,ebroder/anygit
|
python
|
## Code Before:
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
## Instruction:
Load the logging config right away so it actually works
## Code After:
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
# ... existing code ...
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
# ... rest of the code ...
|
1042c0104d33ab2a3a220ef5f91c9e225af8c080
|
include/tcframe/type.h
|
include/tcframe/type.h
|
using std::enable_if;
using std::integral_constant;
using std::is_arithmetic;
using std::is_same;
using std::ostream;
using std::string;
namespace tcframe {
class Variable {
public:
virtual void printTo(ostream& out) = 0;
virtual ~Variable() { };
};
template<typename T>
using RequiresScalar = typename enable_if<integral_constant<bool, is_arithmetic<T>::value || is_same<string, T>::value>::value>::type;
template<typename T>
class Scalar : public Variable {
private:
T* value;
public:
explicit Scalar(T& value) {
this->value = &value;
}
void printTo(ostream& out) {
out << *value;
}
};
}
#endif
|
using std::enable_if;
using std::integral_constant;
using std::is_arithmetic;
using std::is_same;
using std::ostream;
using std::string;
namespace tcframe {
class Variable {
public:
virtual void printTo(ostream& out) = 0;
virtual ~Variable() { };
};
template<typename T>
using RequiresScalar = typename enable_if<is_arithmetic<T>::value || is_same<string, T>::value>::type;
template<typename T>
class Scalar : public Variable {
private:
T* value;
public:
explicit Scalar(T& value) {
this->value = &value;
}
void printTo(ostream& out) {
out << *value;
}
};
}
#endif
|
Simplify SFINAE expression for RequiresScalar
|
Simplify SFINAE expression for RequiresScalar
|
C
|
mit
|
tcframe/tcframe,ia-toki/tcframe,fushar/tcframe,ia-toki/tcframe,tcframe/tcframe,fushar/tcframe
|
c
|
## Code Before:
using std::enable_if;
using std::integral_constant;
using std::is_arithmetic;
using std::is_same;
using std::ostream;
using std::string;
namespace tcframe {
class Variable {
public:
virtual void printTo(ostream& out) = 0;
virtual ~Variable() { };
};
template<typename T>
using RequiresScalar = typename enable_if<integral_constant<bool, is_arithmetic<T>::value || is_same<string, T>::value>::value>::type;
template<typename T>
class Scalar : public Variable {
private:
T* value;
public:
explicit Scalar(T& value) {
this->value = &value;
}
void printTo(ostream& out) {
out << *value;
}
};
}
#endif
## Instruction:
Simplify SFINAE expression for RequiresScalar
## Code After:
using std::enable_if;
using std::integral_constant;
using std::is_arithmetic;
using std::is_same;
using std::ostream;
using std::string;
namespace tcframe {
class Variable {
public:
virtual void printTo(ostream& out) = 0;
virtual ~Variable() { };
};
template<typename T>
using RequiresScalar = typename enable_if<is_arithmetic<T>::value || is_same<string, T>::value>::type;
template<typename T>
class Scalar : public Variable {
private:
T* value;
public:
explicit Scalar(T& value) {
this->value = &value;
}
void printTo(ostream& out) {
out << *value;
}
};
}
#endif
|
...
};
template<typename T>
using RequiresScalar = typename enable_if<is_arithmetic<T>::value || is_same<string, T>::value>::type;
template<typename T>
class Scalar : public Variable {
...
|
2402afe296191d3fddc98212564fb0158cfdcb51
|
upload_redirects.py
|
upload_redirects.py
|
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/prod_url_alias.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url']
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
|
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/nid_url.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
existing_redirects = Redirect.query.all()
for redirect in existing_redirects:
old_urls.append(redirect.old_url)
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url'].encode('utf8')
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
|
Check for duplicates in existing dataset. Fix reference to dump file.
|
Check for duplicates in existing dataset. Fix reference to dump file.
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
python
|
## Code Before:
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/prod_url_alias.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url']
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
## Instruction:
Check for duplicates in existing dataset. Fix reference to dump file.
## Code After:
import json
from backend.app import app, db
from backend.models import *
from flask import url_for
# read in json redirect dump
with open('data/nid_url.json', 'r') as f:
redirects = json.loads(f.read())
print len(redirects)
old_urls = []
existing_redirects = Redirect.query.all()
for redirect in existing_redirects:
old_urls.append(redirect.old_url)
error_count = 0
for i in range(len(redirects)):
nid = None
try:
nid = int(redirects[i]['nid'])
except ValueError as e:
tmp = redirects[i]['nid']
if not 'user' in tmp:
tmp = tmp.split('/')
for item in tmp:
try:
nid = int(item)
break
except ValueError:
pass
url = redirects[i]['url']
if nid and not url in old_urls:
redirect = Redirect(nid=nid, old_url=url)
old_urls.append(url)
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url'].encode('utf8')
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
db.session.commit()
print "Error count:", str(error_count)
|
// ... existing code ...
from flask import url_for
# read in json redirect dump
with open('data/nid_url.json', 'r') as f:
redirects = json.loads(f.read())
// ... modified code ...
print len(redirects)
old_urls = []
existing_redirects = Redirect.query.all()
for redirect in existing_redirects:
old_urls.append(redirect.old_url)
error_count = 0
for i in range(len(redirects)):
...
db.session.add(redirect)
else:
error_count += 1
print nid, redirects[i]['url'].encode('utf8')
if i % 500 == 0:
print "saving 500 redirects (" + str(i) + " out of " + str(len(redirects)) + ")"
db.session.commit()
// ... rest of the code ...
|
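A note on the design: old_urls is a plain list, so every membership check is a linear scan over a growing collection. A set gives the same duplicate guard in constant time; a tiny self-contained illustration with made-up data:

redirects = [{'nid': '1', 'url': 'a'}, {'nid': '2', 'url': 'a'}, {'nid': '3', 'url': 'b'}]
seen = set()
kept = []
for entry in redirects:
    if entry['url'] not in seen:
        seen.add(entry['url'])
        kept.append(entry)
print(kept)  # the second entry for 'a' is skipped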
6e1a211ff1834f8047261d51737afcb0412075b5
|
memleak.py
|
memleak.py
|
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
|
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
|
Add logging of memory use
|
Add logging of memory use
|
Python
|
mit
|
MaestroGraph/sparse-hyper
|
python
|
## Code Before:
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
## Instruction:
Add logging of memory use
## Code After:
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
|
# ... existing code ...
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
# ... rest of the code ...
|
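The change above only adds a periodic logging call inside the loop. A small self-contained variant of the same pattern logs host-side resident memory with psutil every N iterations; psutil is the only non-stdlib dependency, and on the GPU side one could log torch.cuda.memory_allocated() instead, assuming a CUDA build of PyTorch:

import logging, os, psutil

logging.basicConfig(level=logging.INFO)

def log_rss(step, every=25):
    # Log the resident set size of this process every `every` steps.
    if step % every == 0:
        rss_mb = psutil.Process(os.getpid()).memory_info().rss / 1e6
        logging.info("step %d: rss %.1f MB", step, rss_mb)

for i in range(100):
    work = [0.0] * 10000   # stand-in for the real training step
    log_rss(i)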
e30b9cfd55b91424de62e5ac9fcdb0464a78f37e
|
testtube/tests/__init__.py
|
testtube/tests/__init__.py
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
Make import mock.Mock or unittest.mock.Mock easier
|
Make import mock.Mock or unittest.mock.Mock easier
|
Python
|
mit
|
thomasw/testtube,beck/testtube,blaix/testtube
|
python
|
## Code Before:
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
## Instruction:
Make import mock.Mock or unittest.mock.Mock easier
## Code After:
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
...
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
...
|
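The point of this change is that test modules can import Mock and patch from one place regardless of interpreter version; the trailing `Mock, patch, unittest` line only quiets unused-import warnings from tools that ignore noqa comments. A short sketch of using the same shim (runs as-is on Python 3; on Python 2 it needs the PyPI mock backport):

import sys

if sys.version_info < (3,):
    from mock import Mock, patch            # NOQA - PyPI backport on Python 2
else:
    from unittest.mock import Mock, patch   # NOQA - stdlib since Python 3.3

fake = Mock(return_value=42)
assert fake() == 42
with patch("os.getcwd", return_value="/tmp"):
    import os
    assert os.getcwd() == "/tmp"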
5a785f725d68733561a7e5e82c57655e25439ec8
|
indra/tests/test_grounding_resources.py
|
indra/tests/test_grounding_resources.py
|
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
|
Remove exceptional namespaces from test
|
Remove exceptional namespaces from test
|
Python
|
bsd-2-clause
|
johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/belpy
|
python
|
## Code Before:
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
## Instruction:
Remove exceptional namespaces from test
## Code After:
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
|
...
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
def test_misgrounding_map_entries():
bad_entries = []
...
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
...
|
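Both tests keep the same collect-then-assert shape: gather every offending entry and fail once with the full list, which is more informative than stopping at the first bad mapping. A toy illustration of the pattern with a stand-in validator (the real one is indra.statements.validate.validate_db_refs; the sample data is made up):

def validate_db_refs(db_refs):
    # Stand-in check: every namespace must be a string and every id non-empty.
    return all(isinstance(ns, str) and ref for ns, ref in db_refs.items())

grounding_map = {"p53": {"HGNC": "11998"}, "broken": {"HGNC": ""}}

bad_entries = []
for text, db_refs in grounding_map.items():
    if not validate_db_refs(db_refs):
        bad_entries.append([text, db_refs])

print("bad entries:", bad_entries)
# In a test one would finish with: assert not bad_entries, bad_entries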
d3f5e0e2d6104963237a0626d608cc1b0949b762
|
zounds/learn/functional.py
|
zounds/learn/functional.py
|
import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
|
import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
|
Add an option to also return intermediate example norms
|
Add an option to also return intermediate example norms
|
Python
|
mit
|
JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds
|
python
|
## Code Before:
import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
## Instruction:
Add an option to also return intermediate example norms
## Code After:
import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
|
// ... existing code ...
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
// ... rest of the code ...
|
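The new return_norms flag just exposes the per-example norms the function already computes. A compact sketch of the same normalisation; it adds an explicit out= buffer to np.divide so that zero-norm rows come back as zeros rather than uninitialised memory, a detail worth knowing when reusing the where= trick:

import numpy as np

def example_wise_unit_norm(x, return_norms=False):
    shape = x.shape
    flat = x.reshape((len(x), -1))
    norms = np.linalg.norm(flat, axis=-1, keepdims=True)
    normed = np.divide(flat, norms, out=np.zeros_like(flat), where=norms != 0)
    normed = normed.reshape(shape)
    return (normed, norms) if return_norms else normed

x = np.array([[3.0, 4.0], [0.0, 0.0]])
normed, norms = example_wise_unit_norm(x, return_norms=True)
print(norms.ravel())  # [5. 0.]
print(normed)         # [[0.6 0.8] [0.  0. ]]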
0f9cb6eb32ce014cb6ae8d24aefed2347efe68d9
|
src/python/cargo/condor/host.py
|
src/python/cargo/condor/host.py
|
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
job = pickle.load(sys.stdin)
job.run()
if __name__ == "__main__":
main()
|
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
with open("job.pickle") as job_file:
job = pickle.load(job_file)
job.run()
if __name__ == "__main__":
main()
|
Load job from a job file instead of stdin.
|
Load job from a job file instead of stdin.
|
Python
|
mit
|
borg-project/cargo,borg-project/cargo
|
python
|
## Code Before:
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
job = pickle.load(sys.stdin)
job.run()
if __name__ == "__main__":
main()
## Instruction:
Load job from a job file instead of stdin.
## Code After:
import os
import sys
import cPickle as pickle
def main():
"""
Application entry point.
"""
# make the job identifier obvious
process_number = int(os.environ["CONDOR_PROCESS"])
cluster_number = int(os.environ["CONDOR_CLUSTER"])
identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
open(identifier_path, "w").close()
# load and run the job
with open("job.pickle") as job_file:
job = pickle.load(job_file)
job.run()
if __name__ == "__main__":
main()
|
// ... existing code ...
open(identifier_path, "w").close()
# load and run the job
with open("job.pickle") as job_file:
job = pickle.load(job_file)
job.run()
// ... rest of the code ...
|
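The change swaps pickle.load(sys.stdin) for reading a job.pickle sitting in the job's working directory. A minimal round-trip sketch of that hand-off in Python 3 (the original uses cPickle and Python 2 file handling; binary mode is the safe choice here):

import pickle

class EchoJob:
    def __init__(self, message):
        self.message = message
    def run(self):
        print(self.message)

# Submit side: serialise the job next to the Condor submit files.
with open("job.pickle", "wb") as job_file:
    pickle.dump(EchoJob("hello from the worker"), job_file)

# Worker side: load it back and run it.
with open("job.pickle", "rb") as job_file:
    job = pickle.load(job_file)
job.run()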
dcd97762b65df37b5f7c4724892ab76bb631762f
|
src/havenctl/havenctl/launcher.py
|
src/havenctl/havenctl/launcher.py
|
import sys
import getopt
from ctl import HavenCtl
def print_version():
import pkg_resources # part of setuptools
version = pkg_resources.require("havenctl")[0].version
print(version)
def print_usage():
print """
Usage: havenctl [-a <remote_address> | --address=<remote_address>]
[-p <remote_port> | --port=<remote_port>] [-hv] cmd
"""
def execute_from_command_line():
try:
opts, args = getopt.getopt(sys.argv[1:], "a:p:hv", \
["address=", "port=", "version", "help"])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
print_usage()
sys.exit()
elif o in ("-v", "--version"):
print_version()
sys.exit()
elif o in ("-a", "--address"):
address = a
elif o in ("-p", "--port"):
port = a
else:
usage()
assert False, "unhandled option"
sys.exit(2)
handle_user_args(address, port, " ".join(args))
def handle_user_args(address, port, cmd):
repl = HavenCtl()
if(address):
repl.onecmd("connect " + str(address) + " " + str(port))
if(cmd):
repl.onecmd(cmd)
else:
repl.cmdloop()
|
import sys
import getopt
from ctl import HavenCtl
def print_version():
import pkg_resources # part of setuptools
version = pkg_resources.require("havenctl")[0].version
print(version)
def print_usage():
print """
Usage: havenctl [-a <remote_address> | --address=<remote_address>]
[-p <remote_port> | --port=<remote_port>] [-hv] cmd
"""
def execute_from_command_line():
address = None
port = 7854
try:
opts, args = getopt.getopt(sys.argv[1:], "a:p:hv", \
["address=", "port=", "version", "help"])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
print_usage()
sys.exit()
elif o in ("-v", "--version"):
print_version()
sys.exit()
elif o in ("-a", "--address"):
address = a
elif o in ("-p", "--port"):
port = a
else:
usage()
assert False, "unhandled option"
sys.exit(2)
remaining = " ".join(args)
handle_user_args(address, port, remaining)
def handle_user_args(address, port, cmd):
repl = HavenCtl()
if(address):
repl.onecmd("connect " + str(address) + " " + str(port))
if(cmd):
repl.onecmd(cmd)
else:
repl.cmdloop()
|
Make plain repl work again.
|
Make plain repl work again.
|
Python
|
apache-2.0
|
fintler/tomatodb,fintler/tomatodb,fintler/tomatodb
|
python
|
## Code Before:
import sys
import getopt
from ctl import HavenCtl
def print_version():
import pkg_resources # part of setuptools
version = pkg_resources.require("havenctl")[0].version
print(version)
def print_usage():
print """
Usage: havenctl [-a <remote_address> | --address=<remote_address>]
[-p <remote_port> | --port=<remote_port>] [-hv] cmd
"""
def execute_from_command_line():
try:
opts, args = getopt.getopt(sys.argv[1:], "a:p:hv", \
["address=", "port=", "version", "help"])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
print_usage()
sys.exit()
elif o in ("-v", "--version"):
print_version()
sys.exit()
elif o in ("-a", "--address"):
address = a
elif o in ("-p", "--port"):
port = a
else:
usage()
assert False, "unhandled option"
sys.exit(2)
handle_user_args(address, port, " ".join(args))
def handle_user_args(address, port, cmd):
repl = HavenCtl()
if(address):
repl.onecmd("connect " + str(address) + " " + str(port))
if(cmd):
repl.onecmd(cmd)
else:
repl.cmdloop()
## Instruction:
Make plain repl work again.
## Code After:
import sys
import getopt
from ctl import HavenCtl
def print_version():
import pkg_resources # part of setuptools
version = pkg_resources.require("havenctl")[0].version
print(version)
def print_usage():
print """
Usage: havenctl [-a <remote_address> | --address=<remote_address>]
[-p <remote_port> | --port=<remote_port>] [-hv] cmd
"""
def execute_from_command_line():
address = None
port = 7854
try:
opts, args = getopt.getopt(sys.argv[1:], "a:p:hv", \
["address=", "port=", "version", "help"])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
print_usage()
sys.exit()
elif o in ("-v", "--version"):
print_version()
sys.exit()
elif o in ("-a", "--address"):
address = a
elif o in ("-p", "--port"):
port = a
else:
usage()
assert False, "unhandled option"
sys.exit(2)
remaining = " ".join(args)
handle_user_args(address, port, remaining)
def handle_user_args(address, port, cmd):
repl = HavenCtl()
if(address):
repl.onecmd("connect " + str(address) + " " + str(port))
if(cmd):
repl.onecmd(cmd)
else:
repl.cmdloop()
|
...
"""
def execute_from_command_line():
address = None
port = 7854
try:
opts, args = getopt.getopt(sys.argv[1:], "a:p:hv", \
["address=", "port=", "version", "help"])
...
assert False, "unhandled option"
sys.exit(2)
remaining = " ".join(args)
handle_user_args(address, port, remaining)
def handle_user_args(address, port, cmd):
repl = HavenCtl()
...
|
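What makes the plain REPL work again is simply initialising address and port before the option loop, so the later handle_user_args call never hits an unbound name when no -a/-p flags were given. A stripped-down sketch of the same parsing (7854 is kept from the record; everything else is illustrative):

import getopt

def parse(argv):
    address = None   # defaults exist even when no options are passed
    port = 7854
    opts, operands = getopt.getopt(argv, "a:p:", ["address=", "port="])
    for opt, val in opts:
        if opt in ("-a", "--address"):
            address = val
        elif opt in ("-p", "--port"):
            port = int(val)
    return address, port, " ".join(operands)

print(parse([]))                            # (None, 7854, '')
print(parse(["-a", "10.0.0.2", "status"]))  # ('10.0.0.2', 7854, 'status')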
16ad5a3f17fdb96f2660019fabbd7bb787ae4ffb
|
pywsd/baseline.py
|
pywsd/baseline.py
|
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
Add pos for max_lemma_count also
|
Add pos for max_lemma_count also
|
Python
|
mit
|
alvations/pywsd,alvations/pywsd
|
python
|
## Code Before:
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
## Instruction:
Add pos for max_lemma_count also
## Code After:
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
...
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
...
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
...
|
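The intent of the instruction is that the pos filter reaches the wn.synsets call inside max_lemma_count, just as it already does for random_sense and first_sense. A small sketch that passes the argument through explicitly; it requires NLTK with the WordNet data downloaded, and the chosen senses depend on the corpus version:

from nltk.corpus import wordnet as wn

def max_lemma_count(word, pos=None):
    counts = {s: sum(l.count() for l in s.lemmas())
              for s in wn.synsets(word, pos=pos)}
    return max(counts, key=counts.get)

print(max_lemma_count("bank"))               # most frequent sense over all parts of speech
print(max_lemma_count("bank", pos=wn.NOUN))  # restricted to noun senses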
33fbc424d725836355c071593042953fb195cff6
|
server/project/apps/core/serializers.py
|
server/project/apps/core/serializers.py
|
from rest_framework import serializers
from .models import Playlist, Track, Favorite
class TrackSerializer(serializers.ModelSerializer):
class Meta:
model = Track
fields = '__all__'
class PlaylistSerializer(serializers.ModelSerializer):
tracks = TrackSerializer(many=True)
class Meta:
model = Playlist
fields = ('id', 'playlist_name', 'user_id', 'tracks')
def create(self, validated_data):
tracks_data = validated_data.pop('tracks')
playlist = Playlist.objects.create(**validated_data)
for track_data in tracks_data:
Track.objects.create(**track_data)
return playlist
def update(self, instance, validated_data):
tracks_data = validated_data.pop('tracks')
instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name)
instance.save()
Track.objects.filter(playlist=instance.id).delete()
for track_data in tracks_data:
Track.objects.create(**track_data)
instance.tracks.add(track_id)
instance.save()
return Playlist.objects.get(pk=instance.id)
class FavoriteSerializer(serializers.ModelSerializer):
class Meta:
model = Favorite
fields = '__all__'
|
from rest_framework import serializers
from .models import Playlist, Track, Favorite
class TrackSerializer(serializers.ModelSerializer):
class Meta:
model = Track
fields = '__all__'
class PlaylistSerializer(serializers.ModelSerializer):
tracks = TrackSerializer(many=True)
class Meta:
model = Playlist
fields = ('id', 'playlist_name', 'user_id', 'tracks')
def create(self, validated_data):
tracks_data = validated_data.pop('tracks')
playlist = Playlist.objects.create(**validated_data)
for track_data in tracks_data:
Track.objects.create(**track_data)
return playlist
def update(self, instance, validated_data):
tracks_data = validated_data.pop('tracks')
instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name)
instance.save()
Track.objects.filter(playlist=instance.id).delete()
for track_data in tracks_data:
track_id = Track.objects.create(**track_data)
instance.tracks.add(track_id)
instance.save()
return Playlist.objects.get(pk=instance.id)
class FavoriteSerializer(serializers.ModelSerializer):
class Meta:
model = Favorite
fields = '__all__'
|
Add tracks to playlist on update
|
Add tracks to playlist on update
|
Python
|
mit
|
hrr20-over9000/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,douvaughn/9001,douvaughn/9001,hxue920/9001,hrr20-over9000/9001,hxue920/9001,CalHoll/SoundMoose,CalHoll/SoundMoose,douvaughn/9001,CalHoll/SoundMoose,hrr20-over9000/9001,hxue920/9001,douvaughn/9001,hxue920/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,CalHoll/SoundMoose
|
python
|
## Code Before:
from rest_framework import serializers
from .models import Playlist, Track, Favorite
class TrackSerializer(serializers.ModelSerializer):
class Meta:
model = Track
fields = '__all__'
class PlaylistSerializer(serializers.ModelSerializer):
tracks = TrackSerializer(many=True)
class Meta:
model = Playlist
fields = ('id', 'playlist_name', 'user_id', 'tracks')
def create(self, validated_data):
tracks_data = validated_data.pop('tracks')
playlist = Playlist.objects.create(**validated_data)
for track_data in tracks_data:
Track.objects.create(**track_data)
return playlist
def update(self, instance, validated_data):
tracks_data = validated_data.pop('tracks')
instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name)
instance.save()
Track.objects.filter(playlist=instance.id).delete()
for track_data in tracks_data:
Track.objects.create(**track_data)
instance.tracks.add(track_id)
instance.save()
return Playlist.objects.get(pk=instance.id)
class FavoriteSerializer(serializers.ModelSerializer):
class Meta:
model = Favorite
fields = '__all__'
## Instruction:
Add tracks to playlist on update
## Code After:
from rest_framework import serializers
from .models import Playlist, Track, Favorite
class TrackSerializer(serializers.ModelSerializer):
class Meta:
model = Track
fields = '__all__'
class PlaylistSerializer(serializers.ModelSerializer):
tracks = TrackSerializer(many=True)
class Meta:
model = Playlist
fields = ('id', 'playlist_name', 'user_id', 'tracks')
def create(self, validated_data):
tracks_data = validated_data.pop('tracks')
playlist = Playlist.objects.create(**validated_data)
for track_data in tracks_data:
Track.objects.create(**track_data)
return playlist
def update(self, instance, validated_data):
tracks_data = validated_data.pop('tracks')
instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name)
instance.save()
Track.objects.filter(playlist=instance.id).delete()
for track_data in tracks_data:
track_id = Track.objects.create(**track_data)
instance.tracks.add(track_id)
instance.save()
return Playlist.objects.get(pk=instance.id)
class FavoriteSerializer(serializers.ModelSerializer):
class Meta:
model = Favorite
fields = '__all__'
|
// ... existing code ...
Track.objects.filter(playlist=instance.id).delete()
for track_data in tracks_data:
track_id = Track.objects.create(**track_data)
instance.tracks.add(track_id)
instance.save()
// ... rest of the code ...
|
669280351b04d61df1de5ff03c4c7a258b37ad32
|
sell/views.py
|
sell/views.py
|
from decimal import Decimal
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
|
from decimal import Decimal
import re
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['isbn'] = re.sub(r'[^\d.]+', '', book['isbn'])
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
|
Delete non-digit characters in ISBN in server side
|
Delete non-digit characters in ISBN in server side
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
python
|
## Code Before:
from decimal import Decimal
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
## Instruction:
Delete non-digit characters in ISBN in server side
## Code After:
from decimal import Decimal
import re
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['isbn'] = re.sub(r'[^\d.]+', '', book['isbn'])
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
|
// ... existing code ...
from decimal import Decimal
import re
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
// ... modified code ...
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['isbn'] = re.sub(r'[^\d.]+', '', book['isbn'])
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
// ... rest of the code ...
|
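Server-side normalisation here is a single re.sub stripping characters that are not digits (the record's pattern [^\d.]+ also preserves dots). A stdlib-only sketch of the same cleanup:

import re

def normalize_isbn(raw):
    # Drop hyphens, spaces and any other non-digit characters.
    return re.sub(r"\D+", "", raw)

print(normalize_isbn("978-83-01 12345-6"))        # 9788301123456
print(re.sub(r"[^\d.]+", "", "978-83.01-12345"))  # 97883.0112345 (dots survive with the record's pattern)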
ab5bc2ec06f22d90a0529531ae6c2f19a359ad83
|
tests/argparse.c
|
tests/argparse.c
|
static void args_empty(void **state) {
args *args = *state;
assert_null(args->opts);
assert_null(args->operands);
}
static void add_option_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_option(args, NULL)
);
}
static void empty_option_fail(void **state) {
args *args = *state;
option *opt = option_new("", "", "");
assert_int_equal(
ARGPARSE_EMPTY_OPTION,
args_add_option(args, opt)
);
option_free(opt);
}
static void add_operand_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_operand(args, NULL)
);
}
int main() {
const struct CMUnitTest tests[] = {
cmocka_unit_test(args_empty),
cmocka_unit_test(add_option_passed_null),
cmocka_unit_test(empty_option_fail),
cmocka_unit_test(add_operand_passed_null),
};
return cmocka_run_group_tests(tests, common_setup, common_teardown);
}
|
static void args_empty(void **state) {
args *args = *state;
assert_null(args->opts);
assert_null(args->operands);
}
static void add_option_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_option(args, NULL)
);
}
static void empty_option_fail(void **state) {
args *args = *state;
option *opt = option_new("", "", "");
assert_int_equal(
ARGPARSE_EMPTY_OPTION,
args_add_option(args, opt)
);
option_free(opt);
}
static void add_operand_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_operand(args, NULL)
);
}
int main() {
#define TEST(test) cmocka_unit_test_setup_teardown( \
test, \
common_setup, \
common_teardown \
)
const struct CMUnitTest tests[] = {
TEST(args_empty),
TEST(add_option_passed_null),
TEST(empty_option_fail),
TEST(add_operand_passed_null),
};
return cmocka_run_group_tests(tests, NULL, NULL);
}
|
Use a TEST() macro to run common_{setup,teardown}
|
Use a TEST() macro to run common_{setup,teardown}
|
C
|
mit
|
ntnn/libargparse,ntnn/libargparse
|
c
|
## Code Before:
static void args_empty(void **state) {
args *args = *state;
assert_null(args->opts);
assert_null(args->operands);
}
static void add_option_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_option(args, NULL)
);
}
static void empty_option_fail(void **state) {
args *args = *state;
option *opt = option_new("", "", "");
assert_int_equal(
ARGPARSE_EMPTY_OPTION,
args_add_option(args, opt)
);
option_free(opt);
}
static void add_operand_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_operand(args, NULL)
);
}
int main() {
const struct CMUnitTest tests[] = {
cmocka_unit_test(args_empty),
cmocka_unit_test(add_option_passed_null),
cmocka_unit_test(empty_option_fail),
cmocka_unit_test(add_operand_passed_null),
};
return cmocka_run_group_tests(tests, common_setup, common_teardown);
}
## Instruction:
Use a TEST() macro to run common_{setup,teardown}
## Code After:
static void args_empty(void **state) {
args *args = *state;
assert_null(args->opts);
assert_null(args->operands);
}
static void add_option_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_option(args, NULL)
);
}
static void empty_option_fail(void **state) {
args *args = *state;
option *opt = option_new("", "", "");
assert_int_equal(
ARGPARSE_EMPTY_OPTION,
args_add_option(args, opt)
);
option_free(opt);
}
static void add_operand_passed_null(void **state) {
args *args = *state;
assert_int_equal(
ARGPARSE_PASSED_NULL,
args_add_operand(args, NULL)
);
}
int main() {
#define TEST(test) cmocka_unit_test_setup_teardown( \
test, \
common_setup, \
common_teardown \
)
const struct CMUnitTest tests[] = {
TEST(args_empty),
TEST(add_option_passed_null),
TEST(empty_option_fail),
TEST(add_operand_passed_null),
};
return cmocka_run_group_tests(tests, NULL, NULL);
}
|
...
}
int main() {
#define TEST(test) cmocka_unit_test_setup_teardown( \
test, \
common_setup, \
common_teardown \
)
const struct CMUnitTest tests[] = {
TEST(args_empty),
TEST(add_option_passed_null),
TEST(empty_option_fail),
TEST(add_operand_passed_null),
};
return cmocka_run_group_tests(tests, NULL, NULL);
}
...
|
4e12aea0a5479bad8289cbf6c9f460931d51f701
|
database.py
|
database.py
|
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
|
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
|
Add autocommit to 1 to avoid select cache ¿WTF?
|
Add autocommit to 1 to avoid select cache ¿WTF?
|
Python
|
agpl-3.0
|
p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos
|
python
|
## Code Before:
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
## Instruction:
Add autocommit to 1 to avoid select cache ¿WTF?
## Code After:
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
|
// ... existing code ...
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
def insert(self,txt):
dbc = self.db.cursor()
// ... rest of the code ...
|
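A likely explanation for the "select cache" the message complains about: MySQLdb opens an implicit transaction, and under InnoDB's default REPEATABLE READ isolation the connection keeps reading the same snapshot until something commits, so repeated SELECTs look stale. autocommit(True) ends the transaction after every statement. A hedged sketch of the same setup; the host, credentials and table are placeholders, and it needs a reachable MySQL server plus the MySQLdb/mysqlclient package:

import MySQLdb

db = MySQLdb.connect("localhost", "user", "password", "exampledb")
db.autocommit(True)   # every statement commits, so later SELECTs see fresh rows

cur = db.cursor()
cur.execute("SELECT temperature FROM fridge_readings WHERE fridge_id = %s", (1,))
print(cur.fetchone())
cur.close()
db.close()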
c35526a4914675c120f0cffb239adc3cc12ce6a5
|
core/src/ethanjones/modularworld/networking/server/ServerNetworking.java
|
core/src/ethanjones/modularworld/networking/server/ServerNetworking.java
|
package ethanjones.modularworld.networking.server;
import com.badlogic.gdx.net.Socket;
import ethanjones.modularworld.core.logging.Log;
import ethanjones.modularworld.networking.common.Networking;
import ethanjones.modularworld.networking.common.packet.Packet;
import ethanjones.modularworld.networking.common.socket.SocketMonitor;
import java.util.HashMap;
public class ServerNetworking extends Networking {
private HashMap<Socket, SocketMonitor> sockets;
private SocketMonitorServer serverSocketMonitor;
public ServerNetworking(int port) {
super(port);
}
public void start() {
Log.info("Starting Server Networking");
sockets = new HashMap<Socket, SocketMonitor>();
serverSocketMonitor = new SocketMonitorServer(port);
serverSocketMonitor.start();
}
@Override
public void stop() {
Log.info("Stopping Server Networking");
serverSocketMonitor.dispose();
for (SocketMonitor socketMonitor : sockets.values()) {
socketMonitor.dispose();
}
}
protected synchronized void accepted(Socket socket) {
sockets.put(socket, new SocketMonitor(socket, this));
Log.info("Successfully connected to " + socket.getRemoteAddress());
}
@Override
public void received(Packet packet, SocketMonitor socketMonitor) {
}
@Override
public void disconnected(SocketMonitor socketMonitor, Exception e) {
super.disconnected(socketMonitor, e);
sockets.remove(socketMonitor.getSocket());
}
}
|
package ethanjones.modularworld.networking.server;
import com.badlogic.gdx.net.Socket;
import com.badlogic.gdx.utils.Array;
import ethanjones.modularworld.core.logging.Log;
import ethanjones.modularworld.networking.common.Networking;
import ethanjones.modularworld.networking.common.packet.Packet;
import ethanjones.modularworld.networking.common.socket.SocketMonitor;
public class ServerNetworking extends Networking {
private Array<SocketMonitor> sockets;
private SocketMonitorServer serverSocketMonitor;
public ServerNetworking(int port) {
super(port);
}
public void start() {
Log.info("Starting Server Networking");
sockets = new Array<SocketMonitor>();
serverSocketMonitor = new SocketMonitorServer(port);
serverSocketMonitor.start();
}
@Override
public void stop() {
Log.info("Stopping Server Networking");
serverSocketMonitor.dispose();
for (SocketMonitor socketMonitor : sockets) {
socketMonitor.dispose();
}
}
protected synchronized void accepted(Socket socket) {
sockets.add(new SocketMonitor(socket, this));
Log.info("Successfully connected to " + socket.getRemoteAddress());
}
@Override
public void received(Packet packet, SocketMonitor socketMonitor) {
}
@Override
public void disconnected(SocketMonitor socketMonitor, Exception e) {
super.disconnected(socketMonitor, e);
sockets.removeValue(socketMonitor, true);
}
}
|
Use array instead of hashmap in Server Networking
|
Use array instead of hashmap in Server Networking
|
Java
|
mit
|
RedTroop/Cubes_2,RedTroop/Cubes_2,ictrobot/Cubes,ictrobot/Cubes
|
java
|
## Code Before:
package ethanjones.modularworld.networking.server;
import com.badlogic.gdx.net.Socket;
import ethanjones.modularworld.core.logging.Log;
import ethanjones.modularworld.networking.common.Networking;
import ethanjones.modularworld.networking.common.packet.Packet;
import ethanjones.modularworld.networking.common.socket.SocketMonitor;
import java.util.HashMap;
public class ServerNetworking extends Networking {
private HashMap<Socket, SocketMonitor> sockets;
private SocketMonitorServer serverSocketMonitor;
public ServerNetworking(int port) {
super(port);
}
public void start() {
Log.info("Starting Server Networking");
sockets = new HashMap<Socket, SocketMonitor>();
serverSocketMonitor = new SocketMonitorServer(port);
serverSocketMonitor.start();
}
@Override
public void stop() {
Log.info("Stopping Server Networking");
serverSocketMonitor.dispose();
for (SocketMonitor socketMonitor : sockets.values()) {
socketMonitor.dispose();
}
}
protected synchronized void accepted(Socket socket) {
sockets.put(socket, new SocketMonitor(socket, this));
Log.info("Successfully connected to " + socket.getRemoteAddress());
}
@Override
public void received(Packet packet, SocketMonitor socketMonitor) {
}
@Override
public void disconnected(SocketMonitor socketMonitor, Exception e) {
super.disconnected(socketMonitor, e);
sockets.remove(socketMonitor.getSocket());
}
}
## Instruction:
Use array instead of hashmap in Server Networking
## Code After:
package ethanjones.modularworld.networking.server;
import com.badlogic.gdx.net.Socket;
import com.badlogic.gdx.utils.Array;
import ethanjones.modularworld.core.logging.Log;
import ethanjones.modularworld.networking.common.Networking;
import ethanjones.modularworld.networking.common.packet.Packet;
import ethanjones.modularworld.networking.common.socket.SocketMonitor;
public class ServerNetworking extends Networking {
private Array<SocketMonitor> sockets;
private SocketMonitorServer serverSocketMonitor;
public ServerNetworking(int port) {
super(port);
}
public void start() {
Log.info("Starting Server Networking");
sockets = new Array<SocketMonitor>();
serverSocketMonitor = new SocketMonitorServer(port);
serverSocketMonitor.start();
}
@Override
public void stop() {
Log.info("Stopping Server Networking");
serverSocketMonitor.dispose();
for (SocketMonitor socketMonitor : sockets) {
socketMonitor.dispose();
}
}
protected synchronized void accepted(Socket socket) {
sockets.add(new SocketMonitor(socket, this));
Log.info("Successfully connected to " + socket.getRemoteAddress());
}
@Override
public void received(Packet packet, SocketMonitor socketMonitor) {
}
@Override
public void disconnected(SocketMonitor socketMonitor, Exception e) {
super.disconnected(socketMonitor, e);
sockets.removeValue(socketMonitor, true);
}
}
|
// ... existing code ...
package ethanjones.modularworld.networking.server;
import com.badlogic.gdx.net.Socket;
import com.badlogic.gdx.utils.Array;
import ethanjones.modularworld.core.logging.Log;
import ethanjones.modularworld.networking.common.Networking;
import ethanjones.modularworld.networking.common.packet.Packet;
import ethanjones.modularworld.networking.common.socket.SocketMonitor;
public class ServerNetworking extends Networking {
private Array<SocketMonitor> sockets;
private SocketMonitorServer serverSocketMonitor;
public ServerNetworking(int port) {
// ... modified code ...
public void start() {
Log.info("Starting Server Networking");
sockets = new Array<SocketMonitor>();
serverSocketMonitor = new SocketMonitorServer(port);
serverSocketMonitor.start();
}
...
public void stop() {
Log.info("Stopping Server Networking");
serverSocketMonitor.dispose();
for (SocketMonitor socketMonitor : sockets) {
socketMonitor.dispose();
}
}
protected synchronized void accepted(Socket socket) {
sockets.add(new SocketMonitor(socket, this));
Log.info("Successfully connected to " + socket.getRemoteAddress());
}
...
@Override
public void disconnected(SocketMonitor socketMonitor, Exception e) {
super.disconnected(socketMonitor, e);
sockets.removeValue(socketMonitor, true);
}
}
// ... rest of the code ...
|
dfdb3a90d41a8d784e39321f64f2c9cca2bbcfb3
|
proxygen/lib/utils/test/MockTime.h
|
proxygen/lib/utils/test/MockTime.h
|
/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#pragma once
#include <glog/logging.h>
#include <proxygen/lib/utils/Time.h>
namespace proxygen {
class MockTimeUtil : public TimeUtil {
public:
void advance(std::chrono::milliseconds ms) {
t_ += ms;
}
void setCurrentTime(TimePoint t) {
CHECK(t.time_since_epoch() > t_.time_since_epoch())
<< "Time can not move backwards";
t_ = t;
}
void verifyAndClear() {
}
TimePoint now() const override {
return t_;
}
private:
TimePoint t_;
};
}
|
/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#pragma once
#include <glog/logging.h>
#include <proxygen/lib/utils/Time.h>
namespace proxygen {
template <typename ClockType = std::chrono::steady_clock>
class MockTimeUtilGeneric : public TimeUtilGeneric<ClockType> {
public:
void advance(std::chrono::milliseconds ms) {
t_ += ms;
}
void setCurrentTime(std::chrono::time_point<ClockType> t) {
CHECK(t.time_since_epoch() > t_.time_since_epoch())
<< "Time can not move backwards";
t_ = t;
}
void verifyAndClear() {
}
std::chrono::time_point<ClockType> now() const override {
return t_;
}
private:
std::chrono::time_point<ClockType> t_;
};
using MockTimeUtil = MockTimeUtilGeneric<>;
}
|
Expire cached TLS session tickets using tlsext_tick_lifetime_hint
|
Expire cached TLS session tickets using tlsext_tick_lifetime_hint
Summary:
Our current TLS cache in liger does not respect timeout hints. We should
start doing that because it will limit certain kinds of attacks if an
attacker gets access to a master key.
Test Plan: Added new test in SSLSessionPersistentCacheTest to test expiration
Reviewed By: [email protected]
Subscribers: bmatheny, seanc, yfeldblum, devonharris
FB internal diff: D2299744
Tasks: 7633098
Signature: t1:2299744:1439331830:9d0770149e49b6094ca61bac4e1e4ef16938c4dc
|
C
|
bsd-3-clause
|
LilMeyer/proxygen,songfj/proxygen,raphaelamorim/proxygen,raphaelamorim/proxygen,raphaelamorim/proxygen,KublaikhanGeek/proxygen,supriyantomaftuh/proxygen,hiproz/proxygen,pueril/proxygen,hongliangzhao/proxygen,raphaelamorim/proxygen,Werror/proxygen,chenmoshushi/proxygen,supriyantomaftuh/proxygen,hiproz/proxygen,jgli/proxygen,chenmoshushi/proxygen,Orvid/proxygen,chenmoshushi/proxygen,LilMeyer/proxygen,hnutank163/proxygen,zhiweicai/proxygen,songfj/proxygen,jgli/proxygen,KublaikhanGeek/proxygen,hiproz/proxygen,Werror/proxygen,songfj/proxygen,hongliangzhao/proxygen,zhiweicai/proxygen,pueril/proxygen,supriyantomaftuh/proxygen,Werror/proxygen,pueril/proxygen,zhiweicai/proxygen,Orvid/proxygen,hnutank163/proxygen,hiproz/proxygen,KublaikhanGeek/proxygen,jgli/proxygen,jgli/proxygen,hongliangzhao/proxygen,hnutank163/proxygen,fqihangf/proxygen,KublaikhanGeek/proxygen,chenmoshushi/proxygen,fqihangf/proxygen,fqihangf/proxygen,pueril/proxygen,supriyantomaftuh/proxygen,Orvid/proxygen,hongliangzhao/proxygen,zhiweicai/proxygen,LilMeyer/proxygen,Werror/proxygen,LilMeyer/proxygen,fqihangf/proxygen,Orvid/proxygen,songfj/proxygen,hnutank163/proxygen
|
c
|
## Code Before:
/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#pragma once
#include <glog/logging.h>
#include <proxygen/lib/utils/Time.h>
namespace proxygen {
class MockTimeUtil : public TimeUtil {
public:
void advance(std::chrono::milliseconds ms) {
t_ += ms;
}
void setCurrentTime(TimePoint t) {
CHECK(t.time_since_epoch() > t_.time_since_epoch())
<< "Time can not move backwards";
t_ = t;
}
void verifyAndClear() {
}
TimePoint now() const override {
return t_;
}
private:
TimePoint t_;
};
}
## Instruction:
Expire cached TLS session tickets using tlsext_tick_lifetime_hint
Summary:
Our current TLS cache in liger does not respect timeout hints. We should
start doing that because it will limit certain kinds of attacks if an
attacker gets access to a master key.
Test Plan: Added new test in SSLSessionPersistentCacheTest to test expiration
Reviewed By: [email protected]
Subscribers: bmatheny, seanc, yfeldblum, devonharris
FB internal diff: D2299744
Tasks: 7633098
Signature: t1:2299744:1439331830:9d0770149e49b6094ca61bac4e1e4ef16938c4dc
## Code After:
/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#pragma once
#include <glog/logging.h>
#include <proxygen/lib/utils/Time.h>
namespace proxygen {
template <typename ClockType = std::chrono::steady_clock>
class MockTimeUtilGeneric : public TimeUtilGeneric<ClockType> {
public:
void advance(std::chrono::milliseconds ms) {
t_ += ms;
}
void setCurrentTime(std::chrono::time_point<ClockType> t) {
CHECK(t.time_since_epoch() > t_.time_since_epoch())
<< "Time can not move backwards";
t_ = t;
}
void verifyAndClear() {
}
std::chrono::time_point<ClockType> now() const override {
return t_;
}
private:
std::chrono::time_point<ClockType> t_;
};
using MockTimeUtil = MockTimeUtilGeneric<>;
}
|
// ... existing code ...
namespace proxygen {
template <typename ClockType = std::chrono::steady_clock>
class MockTimeUtilGeneric : public TimeUtilGeneric<ClockType> {
public:
void advance(std::chrono::milliseconds ms) {
// ... modified code ...
t_ += ms;
}
void setCurrentTime(std::chrono::time_point<ClockType> t) {
CHECK(t.time_since_epoch() > t_.time_since_epoch())
<< "Time can not move backwards";
t_ = t;
...
void verifyAndClear() {
}
std::chrono::time_point<ClockType> now() const override {
return t_;
}
private:
std::chrono::time_point<ClockType> t_;
};
using MockTimeUtil = MockTimeUtilGeneric<>;
}
// ... rest of the code ...
|
de9e8ab1a91e2a0e69971f9c23377f97e717b048
|
app/__init__.py
|
app/__init__.py
|
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
|
import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
Add additional application for gunicorn.
|
Add additional application for gunicorn.
|
Python
|
mpl-2.0
|
mrf345/FQM,mrf345/FQM,mrf345/FQM,mrf345/FQM
|
python
|
## Code Before:
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
## Instruction:
Add additional application for gunicorn.
## Code After:
import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
...
import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
...
|
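The added application object is what gunicorn imports (e.g. gunicorn 'app:application'), and the GUNICORN flag is derived from the SERVER_SOFTWARE environment variable that gunicorn workers have historically exported. A tiny sketch of that detection on its own; whether the variable is set depends on the gunicorn version, so treat it as a heuristic:

import os

def running_under_gunicorn():
    # Looks for something like "gunicorn/19.9.0" in the worker's environment.
    return "gunicorn" in os.environ.get("SERVER_SOFTWARE", "")

print(running_under_gunicorn())  # False when executed directly from a shell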
9baae7a5c633399fe25ca6961e992b50adcd72b4
|
jacquard/service/base.py
|
jacquard/service/base.py
|
import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance.config = config
instance.request = request
instance.reverse = reverse
return instance
|
import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance._config = config
instance._request = request
instance._reverse = reverse
return instance
@property
def config(self):
try:
return self._config
except AttributeError:
raise AttributeError(
"Unbound endpoint: `config` is only available on bound "
"endpoints",
)
@property
def request(self):
try:
return self._request
except AttributeError:
raise AttributeError(
"Unbound endpoint: `request` is only available on bound "
"endpoints",
)
def reverse(self, name, **kwargs):
try:
reverse = self.reverse
except AttributeError:
raise AttributeError(
"Unbound endpoint: `reverse` is only available on bound "
"endpoints",
)
return reverse(name, **kwargs)
|
Use a level of indirection for helpful error messages
|
Use a level of indirection for helpful error messages
|
Python
|
mit
|
prophile/jacquard,prophile/jacquard
|
python
|
## Code Before:
import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance.config = config
instance.request = request
instance.reverse = reverse
return instance
## Instruction:
Use a level of indirection for helpful error messages
## Code After:
import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance._config = config
instance._request = request
instance._reverse = reverse
return instance
@property
def config(self):
try:
return self._config
except AttributeError:
raise AttributeError(
"Unbound endpoint: `config` is only available on bound "
"endpoints",
)
@property
def request(self):
try:
return self._request
except AttributeError:
raise AttributeError(
"Unbound endpoint: `request` is only available on bound "
"endpoints",
)
def reverse(self, name, **kwargs):
try:
reverse = self.reverse
except AttributeError:
raise AttributeError(
"Unbound endpoint: `reverse` is only available on bound "
"endpoints",
)
return reverse(name, **kwargs)
|
# ... existing code ...
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance._config = config
instance._request = request
instance._reverse = reverse
return instance
@property
def config(self):
try:
return self._config
except AttributeError:
raise AttributeError(
"Unbound endpoint: `config` is only available on bound "
"endpoints",
)
@property
def request(self):
try:
return self._request
except AttributeError:
raise AttributeError(
"Unbound endpoint: `request` is only available on bound "
"endpoints",
)
def reverse(self, name, **kwargs):
try:
reverse = self.reverse
except AttributeError:
raise AttributeError(
"Unbound endpoint: `reverse` is only available on bound "
"endpoints",
)
return reverse(name, **kwargs)
# ... rest of the code ...
|
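A minimal stand-alone sketch of the indirection pattern introduced above. The class below is illustrative only and does not use the jacquard Endpoint; it shows how routing access through a property over a private attribute turns an unbound lookup into a descriptive AttributeError.

class BoundThing:
    """Illustrative only: bound/unbound access through a property."""

    def bind(self, config):
        self._config = config
        return self

    @property
    def config(self):
        try:
            return self._config
        except AttributeError:
            raise AttributeError(
                "Unbound: `config` is only available after bind() has been called"
            )

thing = BoundThing()
try:
    thing.config
except AttributeError as exc:
    print(exc)  # descriptive message instead of a bare attribute error
print(thing.bind({'debug': True}).config)  # {'debug': True}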
8e7f793abc012e136fa5ec0f2c003704ab98f751
|
src/nodeconductor_assembly_waldur/experts/filters.py
|
src/nodeconductor_assembly_waldur/experts/filters.py
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
Allow filtering expert bids by a customer
|
Allow filtering expert bids by a customer [WAL-1169]
|
Python
|
mit
|
opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
python
|
## Code Before:
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
## Instruction:
Allow filtering expert bids by a customer [WAL-1169]
## Code After:
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
...
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
customer = core_filters.URLFilter(view_name='customer-detail', name='team__customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='team__customer__uuid')
class Meta(object):
model = models.ExpertBid
...
|
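Purely as a usage illustration (host, path, credentials and UUID below are placeholders, not taken from the record): with django-filter, the new customer_uuid field is applied as an ordinary query parameter on the expert-bid list endpoint.

import requests

# Placeholder deployment and identifiers; real values will differ.
response = requests.get(
    'https://waldur.example.com/api/expert-bids/',
    params={'customer_uuid': '3f2b7a0c-94d3-4a2f-8c7f-1f3a1b2c3d4e'},
    auth=('user', 'secret'),
)
print(response.status_code)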
2b9da0b1eda705121748485ca4ccb42488013540
|
src/main/java/com/pseudo_sudo/openstorage/OpenStorage.java
|
src/main/java/com/pseudo_sudo/openstorage/OpenStorage.java
|
package com.pseudo_sudo.openstorage;
import com.pseudo_sudo.openstorage.configuration.ConfigurationController;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
@Mod(modid=OpenStorage.ID, name=OpenStorage.NAME, version=OpenStorage.VERSION)
public class OpenStorage {
public static final String ID = "OpenStorage";
public static final String ID_LOWER = ID.toLowerCase();
public static final String NAME = "OpenStorage";
public static final String VERSION = "1.7.10-0.1";
private ConfigurationController configurationController;
@Mod.Instance(OpenStorage.ID)
public static OpenStorage instance;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
this.configurationController = new ConfigurationController(event.getSuggestedConfigurationFile());
}
}
|
package com.pseudo_sudo.openstorage;
import com.pseudo_sudo.openstorage.configuration.ConfigurationController;
import com.pseudo_sudo.openstorage.init.ModBlocks;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
@Mod(modid=OpenStorage.ID, name=OpenStorage.NAME, version=OpenStorage.VERSION)
public class OpenStorage {
public static final String ID = "OpenStorage";
public static final String ID_LOWER = ID.toLowerCase();
public static final String NAME = "OpenStorage";
public static final String VERSION = "1.7.10-0.1";
private ConfigurationController configurationController;
@Mod.Instance(OpenStorage.ID)
public static OpenStorage instance;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
this.configurationController = new ConfigurationController(event.getSuggestedConfigurationFile());
ModBlocks.init();
}
}
|
Call init to add blocks.
|
Call init to add blocks.
|
Java
|
mit
|
PseudoSudoLP/OpenStorage
|
java
|
## Code Before:
package com.pseudo_sudo.openstorage;
import com.pseudo_sudo.openstorage.configuration.ConfigurationController;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
@Mod(modid=OpenStorage.ID, name=OpenStorage.NAME, version=OpenStorage.VERSION)
public class OpenStorage {
public static final String ID = "OpenStorage";
public static final String ID_LOWER = ID.toLowerCase();
public static final String NAME = "OpenStorage";
public static final String VERSION = "1.7.10-0.1";
private ConfigurationController configurationController;
@Mod.Instance(OpenStorage.ID)
public static OpenStorage instance;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
this.configurationController = new ConfigurationController(event.getSuggestedConfigurationFile());
}
}
## Instruction:
Call init to add blocks.
## Code After:
package com.pseudo_sudo.openstorage;
import com.pseudo_sudo.openstorage.configuration.ConfigurationController;
import com.pseudo_sudo.openstorage.init.ModBlocks;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
@Mod(modid=OpenStorage.ID, name=OpenStorage.NAME, version=OpenStorage.VERSION)
public class OpenStorage {
public static final String ID = "OpenStorage";
public static final String ID_LOWER = ID.toLowerCase();
public static final String NAME = "OpenStorage";
public static final String VERSION = "1.7.10-0.1";
private ConfigurationController configurationController;
@Mod.Instance(OpenStorage.ID)
public static OpenStorage instance;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
this.configurationController = new ConfigurationController(event.getSuggestedConfigurationFile());
ModBlocks.init();
}
}
|
# ... existing code ...
package com.pseudo_sudo.openstorage;
import com.pseudo_sudo.openstorage.configuration.ConfigurationController;
import com.pseudo_sudo.openstorage.init.ModBlocks;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
# ... modified code ...
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
this.configurationController = new ConfigurationController(event.getSuggestedConfigurationFile());
ModBlocks.init();
}
}
# ... rest of the code ...
|
9c48a952435797ead8239b7e0c9d5eeda1d7f999
|
lib/libmid/errstr.c
|
lib/libmid/errstr.c
|
enum { Bufsz = 1024 };
static char curerr[Bufsz + 1];
void seterrstr(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
vsnprintf(curerr, Bufsz + 1, fmt, ap);
va_end(ap);
}
const char *miderrstr(void){
int err = errno;
if (curerr[0] != '\0') {
static char retbuf[Bufsz + 1];
strncpy(retbuf, curerr, Bufsz);
retbuf[Bufsz] = '\0';
curerr[0] = '\0';
return retbuf;
}
const char *e = SDL_GetError();
if(e[0] != '\0')
return e;
return strerror(err);
}
|
enum { Bufsz = 1024 };
static char curerr[Bufsz];
void seterrstr(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
vsnprintf(curerr, Bufsz, fmt, ap);
va_end(ap);
}
const char *miderrstr(void){
int err = errno;
if (curerr[0] != '\0') {
static char retbuf[Bufsz];
strncpy(retbuf, curerr, Bufsz - 1);
retbuf[Bufsz - 1] = '\0';
curerr[0] = '\0';
return retbuf;
}
const char *e = SDL_GetError();
if(e[0] != '\0')
return e;
return strerror(err);
}
|
Change buffer sizes to ease stk5.
|
Change buffer sizes to ease stk5.
|
C
|
mit
|
velour/mid,velour/mid,velour/mid
|
c
|
## Code Before:
enum { Bufsz = 1024 };
static char curerr[Bufsz + 1];
void seterrstr(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
vsnprintf(curerr, Bufsz + 1, fmt, ap);
va_end(ap);
}
const char *miderrstr(void){
int err = errno;
if (curerr[0] != '\0') {
static char retbuf[Bufsz + 1];
strncpy(retbuf, curerr, Bufsz);
retbuf[Bufsz] = '\0';
curerr[0] = '\0';
return retbuf;
}
const char *e = SDL_GetError();
if(e[0] != '\0')
return e;
return strerror(err);
}
## Instruction:
Change buffer sizes to ease stk5.
## Code After:
enum { Bufsz = 1024 };
static char curerr[Bufsz];
void seterrstr(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
vsnprintf(curerr, Bufsz, fmt, ap);
va_end(ap);
}
const char *miderrstr(void){
int err = errno;
if (curerr[0] != '\0') {
static char retbuf[Bufsz];
strncpy(retbuf, curerr, Bufsz - 1);
retbuf[Bufsz - 1] = '\0';
curerr[0] = '\0';
return retbuf;
}
const char *e = SDL_GetError();
if(e[0] != '\0')
return e;
return strerror(err);
}
|
// ... existing code ...
enum { Bufsz = 1024 };
static char curerr[Bufsz];
void seterrstr(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
vsnprintf(curerr, Bufsz, fmt, ap);
va_end(ap);
}
// ... modified code ...
int err = errno;
if (curerr[0] != '\0') {
static char retbuf[Bufsz];
strncpy(retbuf, curerr, Bufsz - 1);
retbuf[Bufsz - 1] = '\0';
curerr[0] = '\0';
return retbuf;
}
// ... rest of the code ...
|
2b1dadb57cce89f12e825dc24a2136fe27a8d0db
|
cattr/function_dispatch.py
|
cattr/function_dispatch.py
|
import attr
@attr.s(slots=True)
class FunctionDispatch(object):
"""
FunctionDispatch is similar to functools.singledispatch, but
instead dispatches based on functions that take the type of the
first argument in the method, and return True or False.
objects that help determine dispatch should be instantiated objects.
"""
_handler_pairs = attr.ib(init=False, default=attr.Factory(list))
_cache = attr.ib(init=False, default=attr.Factory(dict))
def register(self, can_handle, func):
self._handler_pairs.insert(0, (can_handle, func))
self._cache.clear()
def dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
try:
return self._cache[typ]
except KeyError:
self._cache[typ] = self._dispatch(typ)
return self._cache[typ]
def _dispatch(self, typ):
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ))
|
from ._compat import lru_cache
class FunctionDispatch(object):
"""
FunctionDispatch is similar to functools.singledispatch, but
instead dispatches based on functions that take the type of the
first argument in the method, and return True or False.
objects that help determine dispatch should be instantiated objects.
"""
__slots__ = ('_handler_pairs', 'dispatch')
def __init__(self):
self._handler_pairs = []
self.dispatch = lru_cache(64)(self._dispatch)
def register(self, can_handle, func):
self._handler_pairs.insert(0, (can_handle, func))
self.dispatch.cache_clear()
def _dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ))
|
Use lru_cache instead of a dict cache in FunctionDispatch
|
Use lru_cache instead of a dict cache in FunctionDispatch
This has no effect on the microbenchmark, but seems appropriate for
consistency with the previous commit.
|
Python
|
mit
|
python-attrs/cattrs,Tinche/cattrs
|
python
|
## Code Before:
import attr
@attr.s(slots=True)
class FunctionDispatch(object):
"""
FunctionDispatch is similar to functools.singledispatch, but
instead dispatches based on functions that take the type of the
first argument in the method, and return True or False.
objects that help determine dispatch should be instantiated objects.
"""
_handler_pairs = attr.ib(init=False, default=attr.Factory(list))
_cache = attr.ib(init=False, default=attr.Factory(dict))
def register(self, can_handle, func):
self._handler_pairs.insert(0, (can_handle, func))
self._cache.clear()
def dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
try:
return self._cache[typ]
except KeyError:
self._cache[typ] = self._dispatch(typ)
return self._cache[typ]
def _dispatch(self, typ):
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ))
## Instruction:
Use lru_cache instead of a dict cache in FunctionDispatch
This has no effect on the microbenchmark, but seems appropriate for
consistency with the previous commit.
## Code After:
from ._compat import lru_cache
class FunctionDispatch(object):
"""
FunctionDispatch is similar to functools.singledispatch, but
instead dispatches based on functions that take the type of the
first argument in the method, and return True or False.
objects that help determine dispatch should be instantiated objects.
"""
__slots__ = ('_handler_pairs', 'dispatch')
def __init__(self):
self._handler_pairs = []
self.dispatch = lru_cache(64)(self._dispatch)
def register(self, can_handle, func):
self._handler_pairs.insert(0, (can_handle, func))
self.dispatch.cache_clear()
def _dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ))
|
...
from ._compat import lru_cache
class FunctionDispatch(object):
"""
FunctionDispatch is similar to functools.singledispatch, but
...
objects that help determine dispatch should be instantiated objects.
"""
__slots__ = ('_handler_pairs', 'dispatch')
def __init__(self):
self._handler_pairs = []
self.dispatch = lru_cache(64)(self._dispatch)
def register(self, can_handle, func):
self._handler_pairs.insert(0, (can_handle, func))
self.dispatch.cache_clear()
def _dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
...
|
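A self-contained sketch of the caching behaviour described above, using functools.lru_cache directly rather than the project's _compat shim. The point of interest is that register() must call cache_clear(), otherwise previously memoised lookups could keep returning handlers that a newer registration should override.

from functools import lru_cache

class Dispatcher:
    def __init__(self):
        self._pairs = []
        self.dispatch = lru_cache(maxsize=64)(self._dispatch)

    def register(self, predicate, handler):
        self._pairs.insert(0, (predicate, handler))
        self.dispatch.cache_clear()  # drop stale memoised lookups

    def _dispatch(self, typ):
        for predicate, handler in self._pairs:
            if predicate(typ):
                return handler
        raise KeyError(typ)

d = Dispatcher()
d.register(lambda t: issubclass(t, int), lambda v: v + 1)
print(d.dispatch(bool))                # bool is a subclass of int, handler found
print(d.dispatch.cache_info().misses)  # 1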
a302a8bb55920e4db99aaa1c6369707b0d90a9d5
|
jkind-api/src/jkind/api/results/MapRenaming.java
|
jkind-api/src/jkind/api/results/MapRenaming.java
|
package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private Map<String, String> map;
public MapRenaming(Map<String, String> map) {
this.map = map;
}
@Override
public String rename(String original) {
return map.get(original);
}
}
|
package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private final Map<String, String> map;
private final Mode mode;
public static enum Mode {
NULL, IDENTITY
}
public MapRenaming(Map<String, String> map, Mode mode) {
this.map = map;
this.mode = mode;
}
@Override
public String rename(String original) {
String renamed = map.get(original);
if (renamed == null && mode == Mode.IDENTITY) {
return original;
} else {
return renamed;
}
}
}
|
Add two options for map renamings
|
Add two options for map renamings
|
Java
|
bsd-3-clause
|
backesj/jkind,andrewkatis/jkind-1,lgwagner/jkind,agacek/jkind,lgwagner/jkind,backesj/jkind,agacek/jkind,andrewkatis/jkind-1
|
java
|
## Code Before:
package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private Map<String, String> map;
public MapRenaming(Map<String, String> map) {
this.map = map;
}
@Override
public String rename(String original) {
return map.get(original);
}
}
## Instruction:
Add two options for map renamings
## Code After:
package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private final Map<String, String> map;
private final Mode mode;
public static enum Mode {
NULL, IDENTITY
}
public MapRenaming(Map<String, String> map, Mode mode) {
this.map = map;
this.mode = mode;
}
@Override
public String rename(String original) {
String renamed = map.get(original);
if (renamed == null && mode == Mode.IDENTITY) {
return original;
} else {
return renamed;
}
}
}
|
// ... existing code ...
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private final Map<String, String> map;
private final Mode mode;
public static enum Mode {
NULL, IDENTITY
}
public MapRenaming(Map<String, String> map, Mode mode) {
this.map = map;
this.mode = mode;
}
@Override
public String rename(String original) {
String renamed = map.get(original);
if (renamed == null && mode == Mode.IDENTITY) {
return original;
} else {
return renamed;
}
}
}
// ... rest of the code ...
|
afd6a6dcac92c604ff97892e3c95f845dd479bc5
|
includes/grid_cell.h
|
includes/grid_cell.h
|
/*
DATA STRUCTURES
*/
#define GC_NUM_STATES 4
typedef enum { DECIDUOUS,CONIFEROUS,TRANSITIONAL,MIXED} State;
typedef double StateData [GC_NUM_STATES];
typedef enum { MOORE, VONNE} NeighType;
extern State GC_POSSIBLE_STATES [GC_NUM_STATES]; // this is an array giving all possible states
typedef struct {
double meanTemp;
/*Task: list all variables need to be used*/
// Fill later
} Climate;
typedef struct {
State* currentState;
State* stateHistory;
Climate climate;
StateData prevalence;
StateData transitionProbs;
size_t historySize;
} GridCell;
/*.
FUNCTION PROTOTYPES
*/
GridCell* gc_make_cell (size_t numTimeSteps); // allocate memory and null initialize cell, initialize pointers
void gc_get_trans_prob (GridCell* cell);
void gc_select_new_state (GridCell* cell, gsl_rng* rng);
void gc_destroy_cell(GridCell *cell);
#endif
|
/*
DATA STRUCTURES
*/
#define GC_NUM_STATES 4
typedef enum { DECIDUOUS,CONIFEROUS,TRANSITIONAL,MIXED} State;
typedef double StateData [GC_NUM_STATES];
typedef enum { MOORE, VONNE} NeighType;
typedef struct {
double meanTemp;
/*Task: list all variables need to be used*/
// Fill later
} Climate;
typedef struct {
State* currentState;
State* stateHistory;
Climate climate;
StateData prevalence;
StateData transitionProbs;
size_t historySize;
} GridCell;
/*.
FUNCTION PROTOTYPES
*/
GridCell* gc_make_cell (size_t numTimeSteps); // allocate memory and null initialize cell, initialize pointers
void gc_get_trans_prob (GridCell* cell);
void gc_select_new_state (GridCell* cell, gsl_rng* rng);
void gc_destroy_cell(GridCell *cell);
#endif
|
Delete unused extern variable declaration
|
Delete unused extern variable declaration
|
C
|
mit
|
QUICC-FOR/STModel-Simulation,QUICC-FOR/STModel-Simulation
|
c
|
## Code Before:
/*
DATA STRUCTURES
*/
#define GC_NUM_STATES 4
typedef enum { DECIDUOUS,CONIFEROUS,TRANSITIONAL,MIXED} State;
typedef double StateData [GC_NUM_STATES];
typedef enum { MOORE, VONNE} NeighType;
extern State GC_POSSIBLE_STATES [GC_NUM_STATES]; // this is an array giving all possible states
typedef struct {
double meanTemp;
/*Task: list all variables need to be used*/
// Fill later
} Climate;
typedef struct {
State* currentState;
State* stateHistory;
Climate climate;
StateData prevalence;
StateData transitionProbs;
size_t historySize;
} GridCell;
/*.
FUNCTION PROTOTYPES
*/
GridCell* gc_make_cell (size_t numTimeSteps); // allocate memory and null initialize cell, initialize pointers
void gc_get_trans_prob (GridCell* cell);
void gc_select_new_state (GridCell* cell, gsl_rng* rng);
void gc_destroy_cell(GridCell *cell);
#endif
## Instruction:
Delete unused extern variable declaration
## Code After:
/*
DATA STRUCTURES
*/
#define GC_NUM_STATES 4
typedef enum { DECIDUOUS,CONIFEROUS,TRANSITIONAL,MIXED} State;
typedef double StateData [GC_NUM_STATES];
typedef enum { MOORE, VONNE} NeighType;
typedef struct {
double meanTemp;
/*Task: list all variables need to be used*/
// Fill later
} Climate;
typedef struct {
State* currentState;
State* stateHistory;
Climate climate;
StateData prevalence;
StateData transitionProbs;
size_t historySize;
} GridCell;
/*.
FUNCTION PROTOTYPES
*/
GridCell* gc_make_cell (size_t numTimeSteps); // allocate memory and null initialize cell, initialize pointers
void gc_get_trans_prob (GridCell* cell);
void gc_select_new_state (GridCell* cell, gsl_rng* rng);
void gc_destroy_cell(GridCell *cell);
#endif
|
# ... existing code ...
#define GC_NUM_STATES 4
typedef enum { DECIDUOUS,CONIFEROUS,TRANSITIONAL,MIXED} State;
typedef double StateData [GC_NUM_STATES];
typedef enum { MOORE, VONNE} NeighType;
typedef struct {
double meanTemp;
# ... rest of the code ...
|
1231b55752fffd47f3a318def77b466b6e64f801
|
MFIblocks/experiments/src/main/java/il/ac/technion/ie/experiments/Utils/ExpFileUtils.java
|
MFIblocks/experiments/src/main/java/il/ac/technion/ie/experiments/Utils/ExpFileUtils.java
|
package il.ac.technion.ie.experiments.Utils;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
/**
* Created by I062070 on 10/09/2015.
*/
public class ExpFileUtils {
static final Logger logger = Logger.getLogger(ExpFileUtils.class);
public static File createFile(String pathToFile) {
File outputFile = new File(pathToFile);
if (outputFile.exists()) {
logger.info("Output file exists. Deleting " + pathToFile);
if (!outputFile.delete()) {
logger.warn("Failed to delete output file");
}
}
try {
logger.info("Creating the output file");
outputFile.createNewFile();
} catch (IOException e) {
logger.error("Failed to create output file", e);
}
return outputFile;
}
public static String getOutputFilePath(String fileName, String fileSuffix) {
File runningDir = new File(System.getProperty("user.dir"));
File parentFile = runningDir.getParentFile();
return parentFile.getAbsolutePath() + File.separator + fileName + fileSuffix;
}
}
|
package il.ac.technion.ie.experiments.Utils;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
/**
* Created by I062070 on 10/09/2015.
*/
public class ExpFileUtils {
static final Logger logger = Logger.getLogger(ExpFileUtils.class);
public static File createFile(String pathToFile) {
File outputFile = new File(pathToFile);
if (outputFile.exists()) {
logger.info("Output file exists. Deleting " + pathToFile);
if (!outputFile.delete()) {
logger.warn("Failed to delete output file");
}
}
try {
logger.info("Creating the output file");
outputFile.createNewFile();
} catch (IOException e) {
logger.error("Failed to create output file", e);
}
return outputFile;
}
public static String getOutputFilePath(String fileName, String fileSuffix) {
File runningDir = new File(System.getProperty("user.dir"));
File parentFile = runningDir.getParentFile();
return parentFile.getAbsolutePath() + File.separator + fileName + fileSuffix;
}
public static File createOutputFile(String filename) {
File expResults = null;
try {
expResults = new File(filename);
if (expResults.exists()) {
FileUtils.forceDelete(expResults);
}
if (!expResults.createNewFile()) {
expResults = null;
}
} catch (IOException e) {
logger.error("Failed to create file for measurements of Experiment", e);
}
return expResults;
}
}
|
Add a generic method that creates an output file
|
Add a generic method that creates an output file
|
Java
|
mit
|
sapirgolan/MFIBlocking,sapirgolan/MFIBlocking,sapirgolan/MFIBlocking,sapirgolan/MFIBlocking,sapirgolan/MFIBlocking,sapirgolan/MFIBlocking,sapirgolan/MFIBlocking
|
java
|
## Code Before:
package il.ac.technion.ie.experiments.Utils;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
/**
* Created by I062070 on 10/09/2015.
*/
public class ExpFileUtils {
static final Logger logger = Logger.getLogger(ExpFileUtils.class);
public static File createFile(String pathToFile) {
File outputFile = new File(pathToFile);
if (outputFile.exists()) {
logger.info("Output file exists. Deleting " + pathToFile);
if (!outputFile.delete()) {
logger.warn("Failed to delete output file");
}
}
try {
logger.info("Creating the output file");
outputFile.createNewFile();
} catch (IOException e) {
logger.error("Failed to create output file", e);
}
return outputFile;
}
public static String getOutputFilePath(String fileName, String fileSuffix) {
File runningDir = new File(System.getProperty("user.dir"));
File parentFile = runningDir.getParentFile();
return parentFile.getAbsolutePath() + File.separator + fileName + fileSuffix;
}
}
## Instruction:
Add a generic method that creates an output file
## Code After:
package il.ac.technion.ie.experiments.Utils;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
/**
* Created by I062070 on 10/09/2015.
*/
public class ExpFileUtils {
static final Logger logger = Logger.getLogger(ExpFileUtils.class);
public static File createFile(String pathToFile) {
File outputFile = new File(pathToFile);
if (outputFile.exists()) {
logger.info("Output file exists. Deleting " + pathToFile);
if (!outputFile.delete()) {
logger.warn("Failed to delete output file");
}
}
try {
logger.info("Creating the output file");
outputFile.createNewFile();
} catch (IOException e) {
logger.error("Failed to create output file", e);
}
return outputFile;
}
public static String getOutputFilePath(String fileName, String fileSuffix) {
File runningDir = new File(System.getProperty("user.dir"));
File parentFile = runningDir.getParentFile();
return parentFile.getAbsolutePath() + File.separator + fileName + fileSuffix;
}
public static File createOutputFile(String filename) {
File expResults = null;
try {
expResults = new File(filename);
if (expResults.exists()) {
FileUtils.forceDelete(expResults);
}
if (!expResults.createNewFile()) {
expResults = null;
}
} catch (IOException e) {
logger.error("Failed to create file for measurements of Experiment", e);
}
return expResults;
}
}
|
...
package il.ac.technion.ie.experiments.Utils;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import java.io.File;
...
File parentFile = runningDir.getParentFile();
return parentFile.getAbsolutePath() + File.separator + fileName + fileSuffix;
}
public static File createOutputFile(String filename) {
File expResults = null;
try {
expResults = new File(filename);
if (expResults.exists()) {
FileUtils.forceDelete(expResults);
}
if (!expResults.createNewFile()) {
expResults = null;
}
} catch (IOException e) {
logger.error("Failed to create file for measurements of Experiment", e);
}
return expResults;
}
}
...
|
ddfeb1e9ef60e1913bf702e58cf4696cf7c98c6d
|
logicmind/token_parser.py
|
logicmind/token_parser.py
|
from tokens.andd import And
from tokens.expression import Expression
from tokens.iff import Iff
from tokens.nop import Not
from tokens.orr import Or
from tokens.then import Then
from tokens.variable import Variable
class TokenParser:
"""This parser only works with atomic expressions,
so parenthesis are needed everywhere to group items"""
@staticmethod
def parse_expression(string):
# Separate parenthesis so they're new tokens
# Also convert [ or { to the same parenthesis (
for s in '([{':
string = string.replace(s, ' ( ')
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all the tokens
words = string.split()
expressions_stack = [Expression()]
for w in words:
if w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
elif w == '¬':
expressions_stack[-1].add_token(Not())
elif w == '->':
expressions_stack[-1].add_token(Then())
elif w == '<->':
expressions_stack[-1].add_token(Iff())
elif w == 'v':
expressions_stack[-1].add_token(Or())
elif w == '^':
expressions_stack[-1].add_token(And())
else:
expressions_stack[-1].add_token(Variable(w))
return expressions_stack[0]
|
from tokens.andd import And
from tokens.expression import Expression
from tokens.iff import Iff
from tokens.nop import Not
from tokens.orr import Or
from tokens.then import Then
from tokens.variable import Variable
class TokenParser:
"""This parser only works with atomic expressions,
so parenthesis are needed everywhere to group items"""
@staticmethod
def parse_expression(string):
# Separate parenthesis so they're new tokens
# Also convert [ or { to the same parenthesis (
for s in '([{':
string = string.replace(s, ' ( ')
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all operators so we can iterate over them
operators = [Not, Then, Iff, Or, And]
# Get all the tokens
words = string.split()
# Store the found nested expressions on the stack
expressions_stack = [Expression()]
for w in words:
done = False
for operator in operators:
if w in operator.representations:
expressions_stack[-1].add_token(operator())
done = True
break
if done:
pass
elif w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
else:
expressions_stack[-1].add_token(Variable(w))
return expressions_stack[0]
|
Allow more representations when parsing
|
[logicmind] Allow more representations when parsing
|
Python
|
mit
|
LonamiWebs/Py-Utils
|
python
|
## Code Before:
from tokens.andd import And
from tokens.expression import Expression
from tokens.iff import Iff
from tokens.nop import Not
from tokens.orr import Or
from tokens.then import Then
from tokens.variable import Variable
class TokenParser:
"""This parser only works with atomic expressions,
so parenthesis are needed everywhere to group items"""
@staticmethod
def parse_expression(string):
# Separate parenthesis so they're new tokens
# Also convert [ or { to the same parenthesis (
for s in '([{':
string = string.replace(s, ' ( ')
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all the tokens
words = string.split()
expressions_stack = [Expression()]
for w in words:
if w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
elif w == '¬':
expressions_stack[-1].add_token(Not())
elif w == '->':
expressions_stack[-1].add_token(Then())
elif w == '<->':
expressions_stack[-1].add_token(Iff())
elif w == 'v':
expressions_stack[-1].add_token(Or())
elif w == '^':
expressions_stack[-1].add_token(And())
else:
expressions_stack[-1].add_token(Variable(w))
return expressions_stack[0]
## Instruction:
[logicmind] Allow more representations when parsing
## Code After:
from tokens.andd import And
from tokens.expression import Expression
from tokens.iff import Iff
from tokens.nop import Not
from tokens.orr import Or
from tokens.then import Then
from tokens.variable import Variable
class TokenParser:
"""This parser only works with atomic expressions,
so parenthesis are needed everywhere to group items"""
@staticmethod
def parse_expression(string):
# Separate parenthesis so they're new tokens
# Also convert [ or { to the same parenthesis (
for s in '([{':
string = string.replace(s, ' ( ')
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all operators so we can iterate over them
operators = [Not, Then, Iff, Or, And]
# Get all the tokens
words = string.split()
# Store the found nested expressions on the stack
expressions_stack = [Expression()]
for w in words:
done = False
for operator in operators:
if w in operator.representations:
expressions_stack[-1].add_token(operator())
done = True
break
if done:
pass
elif w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
else:
expressions_stack[-1].add_token(Variable(w))
return expressions_stack[0]
|
// ... existing code ...
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all operators so we can iterate over them
operators = [Not, Then, Iff, Or, And]
# Get all the tokens
words = string.split()
# Store the found nested expressions on the stack
expressions_stack = [Expression()]
for w in words:
done = False
for operator in operators:
if w in operator.representations:
expressions_stack[-1].add_token(operator())
done = True
break
if done:
pass
elif w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
else:
expressions_stack[-1].add_token(Variable(w))
// ... rest of the code ...
|
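A reduced, stand-alone sketch of the representation-driven dispatch loop added above. The operator classes here are stand-ins; the real representations lists live in the project's tokens module.

class Not:
    representations = ['¬', '!', 'not']

class And:
    representations = ['^', '&', 'and']

OPERATORS = [Not, And]

def classify(word):
    # Return an operator instance when the word matches any known spelling,
    # otherwise treat it as a variable name.
    for op in OPERATORS:
        if word in op.representations:
            return op()
    return ('var', word)

print(classify('!'))    # an instance of Not
print(classify('and'))  # an instance of And
print(classify('p'))    # ('var', 'p')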
f4d7f7207cff82c38d6973dbef717bfc50345b32
|
models.py
|
models.py
|
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
class Media(models.Model):
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
|
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
def __unicode__(self):
return "Fandom tree node %s" % self.name
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
def __unicode__(self):
return "Image instance %s (%dx%d)" % (self.name, self.pixel_width, self.pixel_height)
class Media(models.Model):
name = models.CharField(max_length=100)
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
def __unicode__(self):
return "Media instance %s, %d in stock" % (self.name, self.stock_amount)
|
Add __unicode__ methods to model
|
Add __unicode__ methods to model
|
Python
|
bsd-3-clause
|
willmurnane/store
|
python
|
## Code Before:
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
class Media(models.Model):
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
## Instruction:
Add __unicode__ methods to model
## Code After:
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
def __unicode__(self):
return "Fandom tree node %s" % self.name
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
def __unicode__(self):
return "Image instance %s (%dx%d)" % (self.name, self.pixel_width, self.pixel_height)
class Media(models.Model):
name = models.CharField(max_length=100)
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
def __unicode__(self):
return "Media instance %s, %d in stock" % (self.name, self.stock_amount)
|
...
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
def __unicode__(self):
return "Fandom tree node %s" % self.name
class Image(models.Model):
pixel_width = models.IntegerField()
...
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
def __unicode__(self):
return "Image instance %s (%dx%d)" % (self.name, self.pixel_width, self.pixel_height)
class Media(models.Model):
name = models.CharField(max_length=100)
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
...
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
def __unicode__(self):
return "Media instance %s, %d in stock" % (self.name, self.stock_amount)
...
|
71f00a03d6cbe4dc4d3cd2362ef91bd192a9a31e
|
who_broke_build.py
|
who_broke_build.py
|
import json
import re
import requests
import socket
import settings
def get_responsible_user(full_url):
members = settings.TEAM_MEMBERS
response = requests.get(
full_url,
auth=(
settings.JENKINS_USERNAME,
settings.JENKINS_PASSWORD
)
)
for each in members:
if ('Started by GitHub push by ' + each in response.content or \
'Started by user ' + each in response.content):
return each
def wait_for_event():
return True
def jenkins_wait_for_event():
sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
while wait_for_event():
data, _ = sock.recvfrom(8 * 1024)
notification_data = json.loads(data)
status = notification_data['build']['status'].upper()
phase = notification_data['build']['phase'].upper()
if phase == 'COMPLETED' and status.startswith('FAIL'):
target = get_responsible_user(
notification_data['build']['full_url']
)
|
import json
import re
import requests
import socket
import settings
def get_responsible_user(full_url):
members = settings.TEAM_MEMBERS
response = requests.get(
full_url,
auth=(
settings.JENKINS_USERNAME,
settings.JENKINS_PASSWORD
)
)
for each in members:
if ('Started by GitHub push by ' + each in response.content or \
'Started by user ' + each in response.content):
return each
def wait_for_event():
return True
def jenkins_wait_for_event():
sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
while wait_for_event():
data, _ = sock.recvfrom(8 * 1024)
notification_data = json.loads(data)
status = notification_data['build']['status'].upper()
phase = notification_data['build']['phase'].upper()
if phase == 'COMPLETED' and status.startswith('FAIL'):
target = get_responsible_user(
notification_data['build']['full_url']
)
if __name__ == '__main__':
jenkins_wait_for_event()
|
Add entry point to execute program
|
Add entry point to execute program
|
Python
|
mit
|
mrteera/who-broke-build-slack,mrteera/who-broke-build-slack,zkan/who-broke-build-slack,prontodev/who-broke-build-slack,prontodev/who-broke-build-slack,zkan/who-broke-build-slack
|
python
|
## Code Before:
import json
import re
import requests
import socket
import settings
def get_responsible_user(full_url):
members = settings.TEAM_MEMBERS
response = requests.get(
full_url,
auth=(
settings.JENKINS_USERNAME,
settings.JENKINS_PASSWORD
)
)
for each in members:
if ('Started by GitHub push by ' + each in response.content or \
'Started by user ' + each in response.content):
return each
def wait_for_event():
return True
def jenkins_wait_for_event():
sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
while wait_for_event():
data, _ = sock.recvfrom(8 * 1024)
notification_data = json.loads(data)
status = notification_data['build']['status'].upper()
phase = notification_data['build']['phase'].upper()
if phase == 'COMPLETED' and status.startswith('FAIL'):
target = get_responsible_user(
notification_data['build']['full_url']
)
## Instruction:
Add entry point to execute program
## Code After:
import json
import re
import requests
import socket
import settings
def get_responsible_user(full_url):
members = settings.TEAM_MEMBERS
response = requests.get(
full_url,
auth=(
settings.JENKINS_USERNAME,
settings.JENKINS_PASSWORD
)
)
for each in members:
if ('Started by GitHub push by ' + each in response.content or \
'Started by user ' + each in response.content):
return each
def wait_for_event():
return True
def jenkins_wait_for_event():
sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
while wait_for_event():
data, _ = sock.recvfrom(8 * 1024)
notification_data = json.loads(data)
status = notification_data['build']['status'].upper()
phase = notification_data['build']['phase'].upper()
if phase == 'COMPLETED' and status.startswith('FAIL'):
target = get_responsible_user(
notification_data['build']['full_url']
)
if __name__ == '__main__':
jenkins_wait_for_event()
|
...
target = get_responsible_user(
notification_data['build']['full_url']
)
if __name__ == '__main__':
jenkins_wait_for_event()
...
|
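For illustration only, a snippet that would let one exercise such a listener by hand. The port and payload shape are assumptions that simply mirror the fields the record reads (phase, status, full_url).

import json
import socket

PORT = 22222  # assumed; the real value comes from settings.JENKINS_NOTIFICATION_UDP_PORT

payload = {
    'build': {
        'phase': 'COMPLETED',
        'status': 'FAILURE',
        'full_url': 'http://jenkins.example.com/job/demo/42/',
    }
}

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(json.dumps(payload).encode(), ('127.0.0.1', PORT))
sock.close()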
06e5c8b39f9088830d04fd692f0858f4e3c5a678
|
model/src/main/java/org/gluu/oxtrust/model/RegistrationConfiguration.java
|
model/src/main/java/org/gluu/oxtrust/model/RegistrationConfiguration.java
|
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
/**
*
*/
package org.gluu.oxtrust.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author "Oleksiy Tataryn"
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegistrationConfiguration {
private List<String> additionalAttributes;
private boolean isCaptchaDisabled;
public List<String> getAdditionalAttributes() {
return additionalAttributes;
}
public void setAdditionalAttributes(List<String> additionalAttributes) {
this.additionalAttributes = additionalAttributes;
}
public boolean isCaptchaDisabled() {
return isCaptchaDisabled;
}
public void setCaptchaDisabled(boolean captchaDisabled) {
isCaptchaDisabled = captchaDisabled;
}
}
|
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
/**
*
*/
package org.gluu.oxtrust.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author "Oleksiy Tataryn"
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegistrationConfiguration implements Serializable {
/**
*
*/
private static final long serialVersionUID = -7310064771467874959L;
/**
*
*/
private List<String> additionalAttributes;
private boolean isCaptchaDisabled;
public List<String> getAdditionalAttributes() {
return additionalAttributes;
}
public void setAdditionalAttributes(List<String> additionalAttributes) {
this.additionalAttributes = additionalAttributes;
}
public boolean isCaptchaDisabled() {
return isCaptchaDisabled;
}
public void setCaptchaDisabled(boolean captchaDisabled) {
isCaptchaDisabled = captchaDisabled;
}
}
|
Fix serialization issue for oxRegistration class
|
Fix serialization issue for oxRegistration class
|
Java
|
mit
|
GluuFederation/oxTrust,GluuFederation/oxTrust,GluuFederation/oxTrust,GluuFederation/oxTrust,GluuFederation/oxTrust
|
java
|
## Code Before:
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
/**
*
*/
package org.gluu.oxtrust.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author "Oleksiy Tataryn"
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegistrationConfiguration {
private List<String> additionalAttributes;
private boolean isCaptchaDisabled;
public List<String> getAdditionalAttributes() {
return additionalAttributes;
}
public void setAdditionalAttributes(List<String> additionalAttributes) {
this.additionalAttributes = additionalAttributes;
}
public boolean isCaptchaDisabled() {
return isCaptchaDisabled;
}
public void setCaptchaDisabled(boolean captchaDisabled) {
isCaptchaDisabled = captchaDisabled;
}
}
## Instruction:
Fix serialization issue for oxRegistration class
## Code After:
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
/**
*
*/
package org.gluu.oxtrust.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author "Oleksiy Tataryn"
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegistrationConfiguration implements Serializable {
/**
*
*/
private static final long serialVersionUID = -7310064771467874959L;
/**
*
*/
private List<String> additionalAttributes;
private boolean isCaptchaDisabled;
public List<String> getAdditionalAttributes() {
return additionalAttributes;
}
public void setAdditionalAttributes(List<String> additionalAttributes) {
this.additionalAttributes = additionalAttributes;
}
public boolean isCaptchaDisabled() {
return isCaptchaDisabled;
}
public void setCaptchaDisabled(boolean captchaDisabled) {
isCaptchaDisabled = captchaDisabled;
}
}
|
# ... existing code ...
*/
package org.gluu.oxtrust.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
# ... modified code ...
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RegistrationConfiguration implements Serializable {
/**
*
*/
private static final long serialVersionUID = -7310064771467874959L;
/**
*
*/
private List<String> additionalAttributes;
private boolean isCaptchaDisabled;
# ... rest of the code ...
|
2dd484154d25351079da5eaa84cb2d1a0224ce53
|
Instanssi/admin_base/views.py
|
Instanssi/admin_base/views.py
|
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
Select latest event when logged in.
|
admin_base: Select latest event when logged in.
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
python
|
## Code Before:
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
## Instruction:
admin_base: Select latest event when logged in.
## Code After:
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
...
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
...
|
ac5c03cef0f0b3676b22e66e89f74ec33f69e9c6
|
tests/python/utils.py
|
tests/python/utils.py
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
Add a generic utility to check any binary availability
|
Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <[email protected]>
|
Python
|
apache-2.0
|
mcaleavya/bcc,brendangregg/bcc,iovisor/bcc,brendangregg/bcc,tuxology/bcc,mcaleavya/bcc,brendangregg/bcc,iovisor/bcc,iovisor/bcc,tuxology/bcc,tuxology/bcc,iovisor/bcc,brendangregg/bcc,brendangregg/bcc,mcaleavya/bcc,mcaleavya/bcc,iovisor/bcc,mcaleavya/bcc,tuxology/bcc,tuxology/bcc
|
python
|
## Code Before:
from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
## Instruction:
Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <[email protected]>
## Code After:
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
# ... existing code ...
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
# ... modified code ...
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
# ... rest of the code ...
|
f58940027a0e152ba68917a4b85dd1dfed1095a9
|
appname/server.py
|
appname/server.py
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Python
|
mit
|
LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples
|
python
|
## Code Before:
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
## Instruction:
Add data: and unsafe-inline for base64 fonts and inline js
## Code After:
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
# ... existing code ...
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
# ... rest of the code ...
|
f26202f688f7612971e35b0ae33a2f961a117876
|
select_multiple_field/widgets.py
|
select_multiple_field/widgets.py
|
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
Use format_html if it is available, fallback for dj 1.4
|
Use format_html if it is available, fallback for dj 1.4
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = ['<select multiple="multiple"{0}>'.format(flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
## Instruction:
Use format_html if it is available, fallback for dj 1.4
## Code After:
from __future__ import unicode_literals
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
"""Multiple select widget ready for jQuery multiselect.js"""
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
rendered_attrs = {'class': 'select-multiple-field'}
rendered_attrs.update(attrs)
if value is None:
value = []
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
|
# ... existing code ...
from django.forms import widgets
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
try:
from django.utils.html import format_html
except ImportError:
def format_html(format_string, *args, **kwargs):
return format_string.format(*args, **kwargs)
class SelectMultipleField(widgets.SelectMultiple):
# ... modified code ...
final_attrs = self.build_attrs(rendered_attrs, name=name)
# output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
output = [format_html('<select multiple="multiple"{0}>',
flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
# ... rest of the code ...
|
5fc7b8379507bbd1d66f2ff3ac3d6f36844db4b1
|
com.developi.wink.demo/src/com/developi/wink/demo/api/TestResource.java
|
com.developi.wink.demo/src/com/developi/wink/demo/api/TestResource.java
|
package com.developi.wink.demo.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.ibm.commons.util.io.json.JsonJavaObject;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
import lotus.domino.Session;
@Path("/test")
public class TestResource {
@GET
public Response test(){
JsonJavaObject resp = new JsonJavaObject();
try {
resp.put("message", "Hello " + getUserSession().getEffectiveUserName());
return Response.ok().type(MediaType.APPLICATION_JSON).entity(resp.toString()).build();
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
}
}
private Session getUserSession() {
return ContextInfo.getUserSession();
}
}
|
package com.developi.wink.demo.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
import lotus.domino.Session;
@Path("/test")
public class TestResource {
@GET
public Response test(){
try {
String userName = getUserSession().getEffectiveUserName();
if("Anonymous".equals(userName)) {
// If connected through a browser, this response will signal for authentication and redirected to the login form.
// However, be careful with 40x responses. If session authentication is enabled for the server,
// The consumer might receive the login form and 200-OK response. This would be a code-breaker.
return Response.status(Status.UNAUTHORIZED).build();
} else {
String message = "<b>Hello " + userName + "</b>";
return Response.ok().type(MediaType.TEXT_HTML).entity(message).build();
}
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
}
}
private Session getUserSession() {
return ContextInfo.getUserSession();
}
}
|
Add authentication and modify response type for /test
|
Add authentication and modify response type for /test
|
Java
|
apache-2.0
|
sbasegmez/RestAssuredDemo
|
java
|
## Code Before:
package com.developi.wink.demo.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.ibm.commons.util.io.json.JsonJavaObject;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
import lotus.domino.Session;
@Path("/test")
public class TestResource {
@GET
public Response test(){
JsonJavaObject resp = new JsonJavaObject();
try {
resp.put("message", "Hello " + getUserSession().getEffectiveUserName());
return Response.ok().type(MediaType.APPLICATION_JSON).entity(resp.toString()).build();
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
}
}
private Session getUserSession() {
return ContextInfo.getUserSession();
}
}
## Instruction:
Add authentication and modify response type for /test
## Code After:
package com.developi.wink.demo.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
import lotus.domino.Session;
@Path("/test")
public class TestResource {
@GET
public Response test(){
try {
String userName = getUserSession().getEffectiveUserName();
if("Anonymous".equals(userName)) {
// If connected through a browser, this response will signal for authentication and redirected to the login form.
// However, be careful with 40x responses. If session authentication is enabled for the server,
// The consumer might receive the login form and 200-OK response. This would be a code-breaker.
return Response.status(Status.UNAUTHORIZED).build();
} else {
String message = "<b>Hello " + userName + "</b>";
return Response.ok().type(MediaType.TEXT_HTML).entity(message).build();
}
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
}
}
private Session getUserSession() {
return ContextInfo.getUserSession();
}
}
|
...
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
...
@GET
public Response test(){
try {
String userName = getUserSession().getEffectiveUserName();
if("Anonymous".equals(userName)) {
// If connected through a browser, this response will signal for authentication and redirected to the login form.
// However, be careful with 40x responses. If session authentication is enabled for the server,
// The consumer might receive the login form and 200-OK response. This would be a code-breaker.
return Response.status(Status.UNAUTHORIZED).build();
} else {
String message = "<b>Hello " + userName + "</b>";
return Response.ok().type(MediaType.TEXT_HTML).entity(message).build();
}
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
...
|
36614eb78a3c24ecc4a5ae8739c05a54d1d29c49
|
src/main/java/org/graylog2/GelfSender.java
|
src/main/java/org/graylog2/GelfSender.java
|
package org.graylog2;
import java.io.IOException;
import java.net.*;
import java.util.List;
public class GelfSender {
private static final int DEFAULT_PORT = 12201;
private static final int PORT_MIN = 8000;
private static final int PORT_MAX = 8888;
private InetAddress host;
private int port;
private DatagramSocket socket;
public GelfSender(String host) throws UnknownHostException, SocketException {
this(host, DEFAULT_PORT);
}
public GelfSender(String host, int port) throws UnknownHostException, SocketException {
this.host = InetAddress.getByName(host);
this.port = port;
this.socket = initiateSocket();
}
private DatagramSocket initiateSocket() throws SocketException {
int port = PORT_MIN;
DatagramSocket resultingSocket = null;
boolean binded = false;
while (!binded) {
try {
resultingSocket = new DatagramSocket(port);
binded = true;
} catch (SocketException e) {
port++;
if (port > PORT_MAX)
throw e;
}
}
return resultingSocket;
}
public boolean sendMessage(GelfMessage message) {
return message.isValid() && sendDatagrams(message.toDatagrams());
}
public boolean sendDatagrams(List<byte[]> bytesList) {
for (byte[] bytes : bytesList) {
DatagramPacket datagramPacket = new DatagramPacket(bytes, bytes.length, host, port);
try {
socket.send(datagramPacket);
} catch (IOException e) {
return false;
}
}
return true;
}
public void close() {
socket.close();
}
}
|
package org.graylog2;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.List;
public class GelfSender {
private static final int DEFAULT_PORT = 12201;
private InetAddress host;
private int port;
private DatagramSocket socket;
public GelfSender(String host) throws UnknownHostException, SocketException {
this(host, DEFAULT_PORT);
}
public GelfSender(String host, int port) throws UnknownHostException, SocketException {
this.host = InetAddress.getByName(host);
this.port = port;
this.socket = initiateSocket();
}
private DatagramSocket initiateSocket() throws SocketException {
return new DatagramSocket(0);
}
public boolean sendMessage(GelfMessage message) {
return message.isValid() && sendDatagrams(message.toDatagrams());
}
public boolean sendDatagrams(List<byte[]> bytesList) {
for (byte[] bytes : bytesList) {
DatagramPacket datagramPacket = new DatagramPacket(bytes, bytes.length, host, port);
try {
socket.send(datagramPacket);
} catch (IOException e) {
return false;
}
}
return true;
}
public void close() {
socket.close();
}
}
|
Simplify initializing a datagram by using source port 0 and letting the OS find an available port
|
Simplify initializing a datagram by using source port 0 and letting the OS
find an available port
|
Java
|
mit
|
t0xa/gelfj,pukkaone/logback-gelf,voidifremoved/gelfj,mstipanov/logback-gelf,gustajz/logback-gelf
|
java
|
## Code Before:
package org.graylog2;
import java.io.IOException;
import java.net.*;
import java.util.List;
public class GelfSender {
private static final int DEFAULT_PORT = 12201;
private static final int PORT_MIN = 8000;
private static final int PORT_MAX = 8888;
private InetAddress host;
private int port;
private DatagramSocket socket;
public GelfSender(String host) throws UnknownHostException, SocketException {
this(host, DEFAULT_PORT);
}
public GelfSender(String host, int port) throws UnknownHostException, SocketException {
this.host = InetAddress.getByName(host);
this.port = port;
this.socket = initiateSocket();
}
private DatagramSocket initiateSocket() throws SocketException {
int port = PORT_MIN;
DatagramSocket resultingSocket = null;
boolean binded = false;
while (!binded) {
try {
resultingSocket = new DatagramSocket(port);
binded = true;
} catch (SocketException e) {
port++;
if (port > PORT_MAX)
throw e;
}
}
return resultingSocket;
}
public boolean sendMessage(GelfMessage message) {
return message.isValid() && sendDatagrams(message.toDatagrams());
}
public boolean sendDatagrams(List<byte[]> bytesList) {
for (byte[] bytes : bytesList) {
DatagramPacket datagramPacket = new DatagramPacket(bytes, bytes.length, host, port);
try {
socket.send(datagramPacket);
} catch (IOException e) {
return false;
}
}
return true;
}
public void close() {
socket.close();
}
}
## Instruction:
Simplify initializing a datagram by using source port 0 and letting the OS
find an available port
## Code After:
package org.graylog2;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.List;
public class GelfSender {
private static final int DEFAULT_PORT = 12201;
private InetAddress host;
private int port;
private DatagramSocket socket;
public GelfSender(String host) throws UnknownHostException, SocketException {
this(host, DEFAULT_PORT);
}
public GelfSender(String host, int port) throws UnknownHostException, SocketException {
this.host = InetAddress.getByName(host);
this.port = port;
this.socket = initiateSocket();
}
private DatagramSocket initiateSocket() throws SocketException {
return new DatagramSocket(0);
}
public boolean sendMessage(GelfMessage message) {
return message.isValid() && sendDatagrams(message.toDatagrams());
}
public boolean sendDatagrams(List<byte[]> bytesList) {
for (byte[] bytes : bytesList) {
DatagramPacket datagramPacket = new DatagramPacket(bytes, bytes.length, host, port);
try {
socket.send(datagramPacket);
} catch (IOException e) {
return false;
}
}
return true;
}
public void close() {
socket.close();
}
}
|
// ... existing code ...
package org.graylog2;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.List;
public class GelfSender {
private static final int DEFAULT_PORT = 12201;
private InetAddress host;
private int port;
// ... modified code ...
}
private DatagramSocket initiateSocket() throws SocketException {
return new DatagramSocket(0);
}
public boolean sendMessage(GelfMessage message) {
// ... rest of the code ...
|
2b7d5dd221477e233f77c8c729ee34f61600cfca
|
es-app/src/EmulationStation.h
|
es-app/src/EmulationStation.h
|
// These numbers and strings need to be manually updated for a new version.
// Do this version number update as the very last commit for the new release version.
#define PROGRAM_VERSION_MAJOR 2
#define PROGRAM_VERSION_MINOR 0
#define PROGRAM_VERSION_MAINTENANCE 1
#define PROGRAM_VERSION_STRING "2.0.1a"
#define PROGRAM_BUILT_STRING __DATE__ " - " __TIME__
#define RESOURCE_VERSION_STRING "2,0,1\0"
#define RESOURCE_VERSION PROGRAM_VERSION_MAJOR,PROGRAM_VERSION_MINOR,PROGRAM_VERSION_MAINTENANCE
|
// These numbers and strings need to be manually updated for a new version.
// Do this version number update as the very last commit for the new release version.
#define PROGRAM_VERSION_MAJOR 2
#define PROGRAM_VERSION_MINOR 0
#define PROGRAM_VERSION_MAINTENANCE 1
#define PROGRAM_VERSION_STRING "2.0.1b SDL mod by Krejza9"
#define PROGRAM_BUILT_STRING __DATE__ " - " __TIME__
#define RESOURCE_VERSION_STRING "2,0,1\0"
#define RESOURCE_VERSION PROGRAM_VERSION_MAJOR,PROGRAM_VERSION_MINOR,PROGRAM_VERSION_MAINTENANCE
|
Update version to 2.0.1b SDL mod by Krejza9.
|
Update version to 2.0.1b SDL mod by Krejza9.
|
C
|
mit
|
Krejza9/EmulationStation-sdlmixer,Krejza9/EmulationStation-sdlmixer,Krejza9/EmulationStation-sdlmixer
|
c
|
## Code Before:
// These numbers and strings need to be manually updated for a new version.
// Do this version number update as the very last commit for the new release version.
#define PROGRAM_VERSION_MAJOR 2
#define PROGRAM_VERSION_MINOR 0
#define PROGRAM_VERSION_MAINTENANCE 1
#define PROGRAM_VERSION_STRING "2.0.1a"
#define PROGRAM_BUILT_STRING __DATE__ " - " __TIME__
#define RESOURCE_VERSION_STRING "2,0,1\0"
#define RESOURCE_VERSION PROGRAM_VERSION_MAJOR,PROGRAM_VERSION_MINOR,PROGRAM_VERSION_MAINTENANCE
## Instruction:
Update version to 2.0.1b SDL mod by Krejza9.
## Code After:
// These numbers and strings need to be manually updated for a new version.
// Do this version number update as the very last commit for the new release version.
#define PROGRAM_VERSION_MAJOR 2
#define PROGRAM_VERSION_MINOR 0
#define PROGRAM_VERSION_MAINTENANCE 1
#define PROGRAM_VERSION_STRING "2.0.1b SDL mod by Krejza9"
#define PROGRAM_BUILT_STRING __DATE__ " - " __TIME__
#define RESOURCE_VERSION_STRING "2,0,1\0"
#define RESOURCE_VERSION PROGRAM_VERSION_MAJOR,PROGRAM_VERSION_MINOR,PROGRAM_VERSION_MAINTENANCE
|
# ... existing code ...
#define PROGRAM_VERSION_MAJOR 2
#define PROGRAM_VERSION_MINOR 0
#define PROGRAM_VERSION_MAINTENANCE 1
#define PROGRAM_VERSION_STRING "2.0.1b SDL mod by Krejza9"
#define PROGRAM_BUILT_STRING __DATE__ " - " __TIME__
# ... rest of the code ...
|
bf0d3c50784e0f7a1bb590a3979eba7c50726b1c
|
chap1/ftoc.c
|
chap1/ftoc.c
|
/* print Fahrenheit to Celsius table
* for Fahrenheit 0, 20, ..., 300 */
int main()
{
int fahr;
int cel;
int lower;
int upper;
int step;
lower = 0; /* lower bound for the table */
upper = 300; /* upper bound for the table */
step = 20; /* amount to step by */
fahr = lower;
while (fahr <= upper) {
cel = 5 * (fahr - 32) / 9;
printf("%3d\t%6d\n", fahr, cel);
fahr += step;
}
}
|
/* print Fahrenheit to Celsius table
* for Fahrenheit 0, 20, ..., 300 */
int main()
{
float fahr;
float cel;
int lower;
int upper;
int step;
lower = 0; /* lower bound for the table */
upper = 300; /* upper bound for the table */
step = 20; /* amount to step by */
fahr = lower;
while (fahr <= upper) {
cel = (5.0 / 9.0) * (fahr - 32.0);
printf("%3.0f\t%6.1f\n", fahr, cel);
fahr += step;
}
}
|
Change from `int` to `float`
|
Change from `int` to `float`
|
C
|
mit
|
jabocg/theclang
|
c
|
## Code Before:
/* print Fahrenheit to Celsius table
* for Fahrenheit 0, 20, ..., 300 */
int main()
{
int fahr;
int cel;
int lower;
int upper;
int step;
lower = 0; /* lower bound for the table */
upper = 300; /* upper bound for the table */
step = 20; /* amount to step by */
fahr = lower;
while (fahr <= upper) {
cel = 5 * (fahr - 32) / 9;
printf("%3d\t%6d\n", fahr, cel);
fahr += step;
}
}
## Instruction:
Change from `int` to `float`
## Code After:
/* print Fahrenheit to Celsius table
* for Fahrenheit 0, 20, ..., 300 */
int main()
{
float fahr;
float cel;
int lower;
int upper;
int step;
lower = 0; /* lower bound for the table */
upper = 300; /* upper bound for the table */
step = 20; /* amount to step by */
fahr = lower;
while (fahr <= upper) {
cel = (5.0 / 9.0) * (fahr - 32.0);
printf("%3.0f\t%6.1f\n", fahr, cel);
fahr += step;
}
}
|
// ... existing code ...
* for Fahrenheit 0, 20, ..., 300 */
int main()
{
float fahr;
float cel;
int lower;
int upper;
int step;
// ... modified code ...
fahr = lower;
while (fahr <= upper) {
cel = (5.0 / 9.0) * (fahr - 32.0);
printf("%3.0f\t%6.1f\n", fahr, cel);
fahr += step;
}
}
// ... rest of the code ...
|
c16006cd8983bbd73f52921c63a51aa6f29b9e88
|
ituro/accounts/tests.py
|
ituro/accounts/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
from django.utils import timezone
from accounts.models import CustomUser, CustomUserManager
class UserCreateTestCase(TestCase):
def test_create_user_correctly(self):
"Creating users correctly"
new_user = CustomUser.objects.create(
email="[email protected]",
name="Participant Name",
phone="09876543210",
school="Some University",
is_staff="False",
is_active="True",
date_joined=timezone.now())
self.assertTrue(isinstance(new_user, CustomUser))
self.assertEqual(new_user.get_full_name(), "Participant Name")
self.assertEqual(new_user.get_short_name(), "Participant Name")
|
Add test for creating accounts
|
Add test for creating accounts
|
Python
|
mit
|
bilbeyt/ituro,ITURO/ituro,bilbeyt/ituro,bilbeyt/ituro,ITURO/ituro,ITURO/ituro
|
python
|
## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Add test for creating accounts
## Code After:
from django.test import TestCase
from django.utils import timezone
from accounts.models import CustomUser, CustomUserManager
class UserCreateTestCase(TestCase):
def test_create_user_correctly(self):
"Creating users correctly"
new_user = CustomUser.objects.create(
email="[email protected]",
name="Participant Name",
phone="09876543210",
school="Some University",
is_staff="False",
is_active="True",
date_joined=timezone.now())
self.assertTrue(isinstance(new_user, CustomUser))
self.assertEqual(new_user.get_full_name(), "Participant Name")
self.assertEqual(new_user.get_short_name(), "Participant Name")
|
...
from django.test import TestCase
from django.utils import timezone
from accounts.models import CustomUser, CustomUserManager
class UserCreateTestCase(TestCase):
def test_create_user_correctly(self):
"Creating users correctly"
new_user = CustomUser.objects.create(
email="[email protected]",
name="Participant Name",
phone="09876543210",
school="Some University",
is_staff="False",
is_active="True",
date_joined=timezone.now())
self.assertTrue(isinstance(new_user, CustomUser))
self.assertEqual(new_user.get_full_name(), "Participant Name")
self.assertEqual(new_user.get_short_name(), "Participant Name")
...
|
78f049ce9713dabd3eec544494dadcab7ff93d4c
|
sui_hei/templatetags/markdown.py
|
sui_hei/templatetags/markdown.py
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
Add header id extension for github preferences
|
Add header id extension for github preferences
|
Python
|
mit
|
heyrict/cindy,heyrict/cindy,heyrict/cindy
|
python
|
## Code Before:
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
## Instruction:
Add header id extension for github preferences
## Code After:
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
# ... existing code ...
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
# ... modified code ...
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
# ... rest of the code ...
|
13c1410de300a7f414b51cb001534f021441a00f
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
import tempfile
from authentication import authentication
class SignupTests(unittest.TestCase):
"""
Signup tests.
"""
def test_signup(self):
"""
Test that a valid signup request returns an OK status.
"""
test_app = authentication.app.test_client()
def test_missing_username(self):
pass
class LoginTests(unittest.TestCase):
"""
Tests for logging in.
"""
if __name__ == '__main__':
unittest.main()
|
import unittest
import tempfile
from authentication import authentication
class SignupTests(unittest.TestCase):
"""
Signup tests.
"""
def test_signup(self):
"""
Test that a valid signup request returns an OK status.
"""
test_app = authentication.app.test_client()
signup = test_app.post('/users', data={})
self.assertEqual(signup.headers['Content-Type'], 'application/json')
def test_missing_username(self):
pass
class LoginTests(unittest.TestCase):
"""
Tests for logging in.
"""
if __name__ == '__main__':
unittest.main()
|
Test that there is a json content type
|
Test that there is a json content type
|
Python
|
mit
|
jenca-cloud/jenca-authentication
|
python
|
## Code Before:
import unittest
import tempfile
from authentication import authentication
class SignupTests(unittest.TestCase):
"""
Signup tests.
"""
def test_signup(self):
"""
Test that a valid signup request returns an OK status.
"""
test_app = authentication.app.test_client()
def test_missing_username(self):
pass
class LoginTests(unittest.TestCase):
"""
Tests for logging in.
"""
if __name__ == '__main__':
unittest.main()
## Instruction:
Test that there is a json content type
## Code After:
import unittest
import tempfile
from authentication import authentication
class SignupTests(unittest.TestCase):
"""
Signup tests.
"""
def test_signup(self):
"""
Test that a valid signup request returns an OK status.
"""
test_app = authentication.app.test_client()
signup = test_app.post('/users', data={})
self.assertEqual(signup.headers['Content-Type'], 'application/json')
def test_missing_username(self):
pass
class LoginTests(unittest.TestCase):
"""
Tests for logging in.
"""
if __name__ == '__main__':
unittest.main()
|
...
Test that a valid signup request returns an OK status.
"""
test_app = authentication.app.test_client()
signup = test_app.post('/users', data={})
self.assertEqual(signup.headers['Content-Type'], 'application/json')
def test_missing_username(self):
pass
...
|
30c21806dcc347326d6ac51be2adac9ff637f241
|
day20/part1.py
|
day20/part1.py
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l <= lowest <= r:
lowest = r + 1
print(lowest)
input()
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l > lowest:
break
if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
Break the loop at the first gap
|
Break the loop at the first gap
|
Python
|
unlicense
|
ultramega/adventofcode2016
|
python
|
## Code Before:
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l <= lowest <= r:
lowest = r + 1
print(lowest)
input()
## Instruction:
Break the loop at the first gap
## Code After:
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l > lowest:
break
if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
// ... existing code ...
lowest = 0
for l, r in ranges:
if l > lowest:
break
if lowest <= r:
lowest = r + 1
print(lowest)
// ... rest of the code ...
|
bf1cc589147429eb4cc125904c7c0690a6deaf1c
|
testsuite/N802.py
|
testsuite/N802.py
|
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
|
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
|
Add more tests around ignored names
|
Add more tests around ignored names
|
Python
|
mit
|
flintwork/pep8-naming
|
python
|
## Code Before:
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
## Instruction:
Add more tests around ignored names
## Code After:
def ok():
pass
#: N802
def __bad():
pass
#: N802
def bad__():
pass
#: N802
def __bad__():
pass
#: Okay
def _ok():
pass
#: Okay
def ok_ok_ok_ok():
pass
#: Okay
def _somehow_good():
pass
#: Okay
def go_od_():
pass
#: Okay
def _go_od_():
pass
#: N802
def NotOK():
pass
#: Okay
def _():
pass
#: Okay
class Foo(object):
def __method(self):
pass
#: Okay
class Foo(object):
def __method__(self):
pass
#: Okay
class ClassName(object):
def __method__(self):
pass
#: N802
class ClassName(object):
def notOk(self):
pass
#: N802
class ClassName(object):
def method(self):
def __bad():
pass
#: Okay
def setUp():
pass
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
|
...
#: Okay
def tearDown():
pass
#: Okay
class TestCase:
def setUp(self):
pass
def tearDown(self):
pass
...
|
351e88dd95db81418cc6d2deb4a943e2659292bc
|
wsgi.py
|
wsgi.py
|
import os
import sys
import site
VIRTUALENV="venv"
# Get site root from this file's location:
SITE_ROOT=os.path.abspath(os.path.dirname(__file__))
# Add virtualenv path to site package root:
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages"))
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages"))
# Add site package root to start of pythonpath:
sys.path.insert(0, SITE_ROOT)
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
import os
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Remove virtualenv setup from WSGI entrypoint
|
Remove virtualenv setup from WSGI entrypoint
Handle it in front end server instead.
|
Python
|
agpl-3.0
|
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
|
python
|
## Code Before:
import os
import sys
import site
VIRTUALENV="venv"
# Get site root from this file's location:
SITE_ROOT=os.path.abspath(os.path.dirname(__file__))
# Add virtualenv path to site package root:
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages"))
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages"))
# Add site package root to start of pythonpath:
sys.path.insert(0, SITE_ROOT)
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
## Instruction:
Remove virtualenv setup from WSGI entrypoint
Handle it in front end server instead.
## Code After:
import os
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
...
import os
# celery should now be available (on the virtualenv path)
import djcelery
...
|
0c3a0fd8eee8ca4ced29dbb69570aa1605ea0d5d
|
PEATSA/Database/Scripts/JobMailer.py
|
PEATSA/Database/Scripts/JobMailer.py
|
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
Print error on mailing a failed job note
|
Print error on mailing a failed job note
|
Python
|
mit
|
dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat,dmnfarrell/peat
|
python
|
## Code Before:
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
## Instruction:
Print error on mailing a failed job note
## Code After:
import sys, time, optparse, os
import PEATSA.Core as Core
import PEATSA.WebApp as WebApp
import ConstructJob
import MySQLdb
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage, version="% 0.1", description=__doc__)
parser.add_option("-c", "--configurationFile", dest="configurationFile",
help="A PEATSA configuration file.", metavar="CONF")
(options, args) = parser.parse_args()
if options.configurationFile is None:
print 'Configuration file must be provided'
sys.exit(1)
configuration = Core.Environment.Configuration(filename=options.configurationFile)
jobTable = configuration.get('DATABASE', 'jobTable')
while(1):
connection = WebApp.UtilityFunctions.ConnectionFromConfiguration(configuration)
jobManager = WebApp.Data.JobManager(connection=connection, jobTable=jobTable)
selectQuery = """SELECT JobId FROM %s WHERE SentMail='0' AND State='Finished' AND NOT Email='Unknown'""" % (jobTable)
cursor = connection.cursor()
cursor.execute(selectQuery)
ids = [el[0] for el in cursor.fetchall()]
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
|
// ... existing code ...
for id in ids:
job = WebApp.Data.Job(id, connection)
print 'Sending mail for job %s to %s' % (job.identification, job.email())
print job.error()
WebApp.UtilityFunctions.SendNotificationEmail(job)
connection.close()
time.sleep(30)
// ... rest of the code ...
|
a2054df4eb41712cec3194905f608396ff6473de
|
core/src/main/java/org/musetest/core/resource/generic/GenericResourceConfiguration.java
|
core/src/main/java/org/musetest/core/resource/generic/GenericResourceConfiguration.java
|
package org.musetest.core.resource.generic;
import org.musetest.core.resource.*;
import org.musetest.core.values.*;
import java.util.*;
/**
* A MuseResource that uses a general set of configuration parameters. It is intended for resources which
* have a few basic configurations for the user to supply. When used in conjunction with with the supported
* annotations, the IDE can generate an editor for those configuration settings without needing custom UI code.
*
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public abstract class GenericResourceConfiguration extends BaseMuseResource
{
/**
* Used for JSON serialization. Use parameters() to get access to the sub-sources
* for this configuration.
*/
public Map<String, ValueSourceConfiguration> getParameters()
{
return _parameters.getSourceMap();
}
public void setParameters(Map<String, ValueSourceConfiguration> sources)
{
_parameters.setSourceMap(sources);
}
public NamedSourcesContainer parameters()
{
return _parameters;
}
private NamedSourcesContainer _parameters = new NamedSourcesContainer();
}
|
package org.musetest.core.resource.generic;
import com.fasterxml.jackson.annotation.*;
import org.musetest.core.*;
import org.musetest.core.resource.*;
import org.musetest.core.values.*;
import java.util.*;
/**
* A MuseResource that uses a general set of configuration parameters. It is intended for resources which
* have a few basic configurations for the user to supply. When used in conjunction with with the supported
* annotations, the IDE can generate an editor for those configuration settings without needing custom UI code.
*
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public abstract class GenericResourceConfiguration extends BaseMuseResource
{
/**
* Used for JSON serialization. Use parameters() to get access to the sub-sources
* for this configuration.
*/
public Map<String, ValueSourceConfiguration> getParameters()
{
return _parameters.getSourceMap();
}
public void setParameters(Map<String, ValueSourceConfiguration> sources)
{
_parameters.setSourceMap(sources);
}
public NamedSourcesContainer parameters()
{
return _parameters;
}
private NamedSourcesContainer _parameters = new NamedSourcesContainer();
@SuppressWarnings("unused") // expect extensions to use this
@JsonIgnore
protected boolean isParameterTrue(MuseExecutionContext context, String name)
{
if (_parameters != null)
{
try
{
MuseValueSource source = BaseValueSource.getValueSource(_parameters, name, true, context.getProject());
return BaseValueSource.getValue(source, context, false, Boolean.class);
}
catch (MuseInstantiationException | ValueSourceResolutionError e)
{
return false;
}
}
return false;
}
}
|
Add convenience method for getting boolean sub-source values.
|
Add convenience method for getting boolean sub-source values.
|
Java
|
apache-2.0
|
ChrisLMerrill/muse,ChrisLMerrill/muse
|
java
|
## Code Before:
package org.musetest.core.resource.generic;
import org.musetest.core.resource.*;
import org.musetest.core.values.*;
import java.util.*;
/**
* A MuseResource that uses a general set of configuration parameters. It is intended for resources which
* have a few basic configurations for the user to supply. When used in conjunction with with the supported
* annotations, the IDE can generate an editor for those configuration settings without needing custom UI code.
*
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public abstract class GenericResourceConfiguration extends BaseMuseResource
{
/**
* Used for JSON serialization. Use parameters() to get access to the sub-sources
* for this configuration.
*/
public Map<String, ValueSourceConfiguration> getParameters()
{
return _parameters.getSourceMap();
}
public void setParameters(Map<String, ValueSourceConfiguration> sources)
{
_parameters.setSourceMap(sources);
}
public NamedSourcesContainer parameters()
{
return _parameters;
}
private NamedSourcesContainer _parameters = new NamedSourcesContainer();
}
## Instruction:
Add convenience method for getting boolean sub-source values.
## Code After:
package org.musetest.core.resource.generic;
import com.fasterxml.jackson.annotation.*;
import org.musetest.core.*;
import org.musetest.core.resource.*;
import org.musetest.core.values.*;
import java.util.*;
/**
* A MuseResource that uses a general set of configuration parameters. It is intended for resources which
* have a few basic configurations for the user to supply. When used in conjunction with with the supported
* annotations, the IDE can generate an editor for those configuration settings without needing custom UI code.
*
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public abstract class GenericResourceConfiguration extends BaseMuseResource
{
/**
* Used for JSON serialization. Use parameters() to get access to the sub-sources
* for this configuration.
*/
public Map<String, ValueSourceConfiguration> getParameters()
{
return _parameters.getSourceMap();
}
public void setParameters(Map<String, ValueSourceConfiguration> sources)
{
_parameters.setSourceMap(sources);
}
public NamedSourcesContainer parameters()
{
return _parameters;
}
private NamedSourcesContainer _parameters = new NamedSourcesContainer();
@SuppressWarnings("unused") // expect extensions to use this
@JsonIgnore
protected boolean isParameterTrue(MuseExecutionContext context, String name)
{
if (_parameters != null)
{
try
{
MuseValueSource source = BaseValueSource.getValueSource(_parameters, name, true, context.getProject());
return BaseValueSource.getValue(source, context, false, Boolean.class);
}
catch (MuseInstantiationException | ValueSourceResolutionError e)
{
return false;
}
}
return false;
}
}
|
# ... existing code ...
package org.musetest.core.resource.generic;
import com.fasterxml.jackson.annotation.*;
import org.musetest.core.*;
import org.musetest.core.resource.*;
import org.musetest.core.values.*;
# ... modified code ...
}
private NamedSourcesContainer _parameters = new NamedSourcesContainer();
@SuppressWarnings("unused") // expect extensions to use this
@JsonIgnore
protected boolean isParameterTrue(MuseExecutionContext context, String name)
{
if (_parameters != null)
{
try
{
MuseValueSource source = BaseValueSource.getValueSource(_parameters, name, true, context.getProject());
return BaseValueSource.getValue(source, context, false, Boolean.class);
}
catch (MuseInstantiationException | ValueSourceResolutionError e)
{
return false;
}
}
return false;
}
}
# ... rest of the code ...
|
6a5609dc518897d94d71a3611b890a3364b8917a
|
setup.py
|
setup.py
|
from taiga_ncurses import __name__, __description__, __version__
from setuptools import setup, find_packages
REQUIREMENTS = [
"requests==2.5.0",
"urwid>=1.3.0",
"x256==0.0.3"
]
NAME = __name__
DESCRIPTION = __description__
VERSION = "{0}.{1}".format(*__version__)
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
packages=find_packages(),
entry_points={
"console_scripts": ["taiga-ncurses = taiga_ncurses.cli:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: End Users/Desktop",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS",
"Programming Language :: Python :: 3.3",
],
install_requires=REQUIREMENTS,)
|
from __future__ import print_function
import sys
if sys.version_info[0] < 3 or sys.version_info[1] < 3:
print("Sorry, taiga-ncurses needs python >= 3.3", file=sys.stderr)
sys.exit(-1)
from taiga_ncurses import __name__, __description__, __version__
from setuptools import setup, find_packages
REQUIREMENTS = [
"requests==2.5.0",
"urwid>=1.3.0",
"x256==0.0.3"
]
NAME = __name__
DESCRIPTION = __description__
VERSION = "{0}.{1}".format(*__version__)
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
packages=find_packages(),
entry_points={
"console_scripts": ["taiga-ncurses = taiga_ncurses.cli:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: End Users/Desktop",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS",
"Programming Language :: Python :: 3.3",
],
install_requires=REQUIREMENTS,)
|
Check if you are using python 3.3+
|
Check if you are using python 3.3+
|
Python
|
apache-2.0
|
taigaio/taiga-ncurses,battlemidget/taiga-ncurses,wweiradio/taiga-ncurses
|
python
|
## Code Before:
from taiga_ncurses import __name__, __description__, __version__
from setuptools import setup, find_packages
REQUIREMENTS = [
"requests==2.5.0",
"urwid>=1.3.0",
"x256==0.0.3"
]
NAME = __name__
DESCRIPTION = __description__
VERSION = "{0}.{1}".format(*__version__)
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
packages=find_packages(),
entry_points={
"console_scripts": ["taiga-ncurses = taiga_ncurses.cli:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: End Users/Desktop",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS",
"Programming Language :: Python :: 3.3",
],
install_requires=REQUIREMENTS,)
## Instruction:
Check if you are using python 3.3+
## Code After:
from __future__ import print_function
import sys
if sys.version_info[0] < 3 or sys.version_info[1] < 3:
print("Sorry, taiga-ncurses needs python >= 3.3", file=sys.stderr)
sys.exit(-1)
from taiga_ncurses import __name__, __description__, __version__
from setuptools import setup, find_packages
REQUIREMENTS = [
"requests==2.5.0",
"urwid>=1.3.0",
"x256==0.0.3"
]
NAME = __name__
DESCRIPTION = __description__
VERSION = "{0}.{1}".format(*__version__)
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
packages=find_packages(),
entry_points={
"console_scripts": ["taiga-ncurses = taiga_ncurses.cli:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: End Users/Desktop",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS",
"Programming Language :: Python :: 3.3",
],
install_requires=REQUIREMENTS,)
|
...
from __future__ import print_function
import sys
if sys.version_info[0] < 3 or sys.version_info[1] < 3:
print("Sorry, taiga-ncurses needs python >= 3.3", file=sys.stderr)
sys.exit(-1)
from taiga_ncurses import __name__, __description__, __version__
...
|
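The commit above guards installation by checking sys.version_info at the top of setup.py, failing fast with a readable message on Python 2. A minimal sketch of the complementary declarative approach, assuming a setuptools/pip combination new enough to honour python_requires (that keyword is an assumption about the toolchain, not part of the original commit):

from setuptools import setup, find_packages

setup(
    name="taiga-ncurses",           # simplified; the project derives this from __name__
    packages=find_packages(),
    # pip refuses to install the package on unsupported interpreters,
    # instead of letting it fail at import time.
    python_requires=">=3.3",
)

The runtime check kept in the commit still covers users on tooling that ignores python_requires, which is presumably why both approaches can coexist.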
1e58e41ba7f1d0a497856395057e798109e8d351
|
src/de/danoeh/antennapod/util/FileNameGenerator.java
|
src/de/danoeh/antennapod/util/FileNameGenerator.java
|
package de.danoeh.antennapod.util;
import java.util.Arrays;
/** Generates valid filenames for a given string. */
public class FileNameGenerator {
private static final char[] ILLEGAL_CHARACTERS = { '/', '\\', '?', '%',
'*', ':', '|', '"', '<', '>' };
static {
Arrays.sort(ILLEGAL_CHARACTERS);
}
private FileNameGenerator() {
}
/**
* This method will return a new string that doesn't contain any illegal
* characters of the given string.
*/
public static String generateFileName(String string) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < string.length(); i++) {
char c = string.charAt(i);
if (Arrays.binarySearch(ILLEGAL_CHARACTERS, c) < 0) {
builder.append(c);
}
}
return builder.toString();
}
public static long generateLong(final String str) {
return str.hashCode();
}
}
|
package de.danoeh.antennapod.util;
import java.util.Arrays;
/** Generates valid filenames for a given string. */
public class FileNameGenerator {
private static final char[] ILLEGAL_CHARACTERS = { '/', '\\', '?', '%',
'*', ':', '|', '"', '<', '>' };
static {
Arrays.sort(ILLEGAL_CHARACTERS);
}
private FileNameGenerator() {
}
/**
* This method will return a new string that doesn't contain any illegal
* characters of the given string.
*/
public static String generateFileName(String string) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < string.length(); i++) {
char c = string.charAt(i);
if (Arrays.binarySearch(ILLEGAL_CHARACTERS, c) < 0) {
builder.append(c).replaceFirst(" *$","");
}
}
return builder.toString();
}
public static long generateLong(final String str) {
return str.hashCode();
}
}
|
Remove trailing spaces in filenames
|
Remove trailing spaces in filenames
FAT does not support trailing spaces, so we must remove them
|
Java
|
mit
|
cdysthe/AntennaPod,twiceyuan/AntennaPod,wooi/AntennaPod,TomHennen/AntennaPod,gk23/AntennaPod,the100rabh/AntennaPod,SpicyCurry/AntennaPod,hgl888/AntennaPod,wskplho/AntennaPod,udif/AntennaPod,mxttie/AntennaPod,drabux/AntennaPod,the100rabh/AntennaPod,jimulabs/AntennaPod-mirror,mfietz/AntennaPod,udif/AntennaPod,facebookarchive/AntennaPod,LTUvac/AntennaPod,wooi/AntennaPod,gaohongyuan/AntennaPod,orelogo/AntennaPod,mfietz/AntennaPod,johnjohndoe/AntennaPod,richq/AntennaPod,TimB0/AntennaPod,twiceyuan/AntennaPod,domingos86/AntennaPod,volhol/AntennaPod,the100rabh/AntennaPod,orelogo/AntennaPod,TimB0/AntennaPod,mfietz/AntennaPod,drabux/AntennaPod,jimulabs/AntennaPod-mirror,samarone/AntennaPod,wangjun/AntennaPod,repat/AntennaPod,facebookarchive/AntennaPod,domingos86/AntennaPod,mfietz/AntennaPod,wangjun/AntennaPod,twiceyuan/AntennaPod,hgl888/AntennaPod,corecode/AntennaPod,TomHennen/AntennaPod,orelogo/AntennaPod,LTUvac/AntennaPod,gk23/AntennaPod,TomHennen/AntennaPod,domingos86/AntennaPod,gaohongyuan/AntennaPod,TimB0/AntennaPod,queenp/AntennaPod,corecode/AntennaPod,drabux/AntennaPod,wangjun/AntennaPod,samarone/AntennaPod,jimulabs/AntennaPod-mirror,wskplho/AntennaPod,cdysthe/AntennaPod,richq/AntennaPod,TomHennen/AntennaPod,mxttie/AntennaPod,udif/AntennaPod,ChaoticMind/AntennaPod,gaohongyuan/AntennaPod,corecode/AntennaPod,orelogo/AntennaPod,mxttie/AntennaPod,domingos86/AntennaPod,richq/AntennaPod,johnjohndoe/AntennaPod,waylife/AntennaPod,gk23/AntennaPod,keunes/AntennaPod,keunes/AntennaPod,TimB0/AntennaPod,ChaoticMind/AntennaPod,LTUvac/AntennaPod,twiceyuan/AntennaPod,keunes/AntennaPod,wooi/AntennaPod,volhol/AntennaPod,ChaoticMind/AntennaPod,facebookarchive/AntennaPod,gaohongyuan/AntennaPod,keunes/AntennaPod,LTUvac/AntennaPod,drabux/AntennaPod,volhol/AntennaPod,wskplho/AntennaPod,udif/AntennaPod,gk23/AntennaPod,the100rabh/AntennaPod,waylife/AntennaPod,SpicyCurry/AntennaPod,corecode/AntennaPod,richq/AntennaPod,SpicyCurry/AntennaPod,johnjohndoe/AntennaPod,johnjohndoe/AntennaPod,wooi/AntennaPod,waylife/AntennaPod,mxttie/AntennaPod,queenp/AntennaPod,hgl888/AntennaPod,wangjun/AntennaPod,queenp/AntennaPod,cdysthe/AntennaPod,SpicyCurry/AntennaPod,samarone/AntennaPod
|
java
|
## Code Before:
package de.danoeh.antennapod.util;
import java.util.Arrays;
/** Generates valid filenames for a given string. */
public class FileNameGenerator {
private static final char[] ILLEGAL_CHARACTERS = { '/', '\\', '?', '%',
'*', ':', '|', '"', '<', '>' };
static {
Arrays.sort(ILLEGAL_CHARACTERS);
}
private FileNameGenerator() {
}
/**
* This method will return a new string that doesn't contain any illegal
* characters of the given string.
*/
public static String generateFileName(String string) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < string.length(); i++) {
char c = string.charAt(i);
if (Arrays.binarySearch(ILLEGAL_CHARACTERS, c) < 0) {
builder.append(c);
}
}
return builder.toString();
}
public static long generateLong(final String str) {
return str.hashCode();
}
}
## Instruction:
Remove trailing spaces in filenames
FAT does not support trailing spaces, so we must remove them
## Code After:
package de.danoeh.antennapod.util;
import java.util.Arrays;
/** Generates valid filenames for a given string. */
public class FileNameGenerator {
private static final char[] ILLEGAL_CHARACTERS = { '/', '\\', '?', '%',
'*', ':', '|', '"', '<', '>' };
static {
Arrays.sort(ILLEGAL_CHARACTERS);
}
private FileNameGenerator() {
}
/**
* This method will return a new string that doesn't contain any illegal
* characters of the given string.
*/
public static String generateFileName(String string) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < string.length(); i++) {
char c = string.charAt(i);
if (Arrays.binarySearch(ILLEGAL_CHARACTERS, c) < 0) {
builder.append(c).replaceFirst(" *$","");
}
}
return builder.toString();
}
public static long generateLong(final String str) {
return str.hashCode();
}
}
|
# ... existing code ...
for (int i = 0; i < string.length(); i++) {
char c = string.charAt(i);
if (Arrays.binarySearch(ILLEGAL_CHARACTERS, c) < 0) {
builder.append(c).replaceFirst(" *$","");
}
}
return builder.toString();
# ... rest of the code ...
|
55b9a048f5dd3018c336a0e367c97ab1367ed440
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='DCA',
version='0.2.3',
description='Count autoencoder for scRNA-seq denoising',
author='Gokcen Eraslan',
author_email="[email protected]",
packages=['dca'],
install_requires=['numpy>=1.7',
'keras>=2.0.8',
'h5py',
'six>=1.10.0',
'scikit-learn',
'scanpy',
'kopt',
'pandas' #for preprocessing
],
url='https://github.com/theislab/dca',
entry_points={
'console_scripts': [
'dca = dca.__main__:main'
]},
license='Apache License 2.0',
classifiers=['License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Programming Language :: Python :: 3.5'],
)
|
from setuptools import setup
setup(
name='DCA',
version='0.2.3',
description='Count autoencoder for scRNA-seq denoising',
author='Gokcen Eraslan',
author_email="[email protected]",
packages=['dca'],
install_requires=['numpy>=1.7',
'keras>=2.0.8',
'tensorflow>=2.0',
'h5py',
'six>=1.10.0',
'scikit-learn',
'scanpy',
'kopt',
'pandas' #for preprocessing
],
url='https://github.com/theislab/dca',
entry_points={
'console_scripts': [
'dca = dca.__main__:main'
]},
license='Apache License 2.0',
classifiers=['License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Programming Language :: Python :: 3.5'],
)
|
Add tf to the requirements
|
Add tf to the requirements
|
Python
|
apache-2.0
|
theislab/dca,theislab/dca,theislab/dca
|
python
|
## Code Before:
from setuptools import setup
setup(
name='DCA',
version='0.2.3',
description='Count autoencoder for scRNA-seq denoising',
author='Gokcen Eraslan',
author_email="[email protected]",
packages=['dca'],
install_requires=['numpy>=1.7',
'keras>=2.0.8',
'h5py',
'six>=1.10.0',
'scikit-learn',
'scanpy',
'kopt',
'pandas' #for preprocessing
],
url='https://github.com/theislab/dca',
entry_points={
'console_scripts': [
'dca = dca.__main__:main'
]},
license='Apache License 2.0',
classifiers=['License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Programming Language :: Python :: 3.5'],
)
## Instruction:
Add tf to the requirements
## Code After:
from setuptools import setup
setup(
name='DCA',
version='0.2.3',
description='Count autoencoder for scRNA-seq denoising',
author='Gokcen Eraslan',
author_email="[email protected]",
packages=['dca'],
install_requires=['numpy>=1.7',
'keras>=2.0.8',
'tensorflow>=2.0',
'h5py',
'six>=1.10.0',
'scikit-learn',
'scanpy',
'kopt',
'pandas' #for preprocessing
],
url='https://github.com/theislab/dca',
entry_points={
'console_scripts': [
'dca = dca.__main__:main'
]},
license='Apache License 2.0',
classifiers=['License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Programming Language :: Python :: 3.5'],
)
|
...
packages=['dca'],
install_requires=['numpy>=1.7',
'keras>=2.0.8',
'tensorflow>=2.0',
'h5py',
'six>=1.10.0',
'scikit-learn',
...
|
052042e2f48b7936a6057c18a128f497d5e5b1a4
|
folium/__init__.py
|
folium/__init__.py
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
|
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|
Make features accessible from root
|
Make features accessible from root
|
Python
|
mit
|
QuLogic/folium,talespaiva/folium,andrewgiessel/folium,themiurgo/folium,shankari/folium,python-visualization/folium,talespaiva/folium,QuLogic/folium,BibMartin/folium,ocefpaf/folium,themiurgo/folium,talespaiva/folium,andrewgiessel/folium,BibMartin/folium,ocefpaf/folium,python-visualization/folium,shankari/folium,shankari/folium,themiurgo/folium,BibMartin/folium,andrewgiessel/folium,talespaiva/folium,QuLogic/folium
|
python
|
## Code Before:
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
## Instruction:
Make features accessible from root
## Code After:
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|
// ... existing code ...
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
// ... rest of the code ...
|
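Re-exporting these classes in folium/__init__.py means callers can build a map from the package root without importing folium.map or folium.features directly. A rough usage sketch with the names exported above; the method calls (add_to, save) are assumptions about the surrounding API and may differ in 0.2.0.dev:

import folium

m = folium.Map(location=[45.52, -122.68], zoom_start=12)
folium.Marker([45.52, -122.68], popup=folium.Popup("Portland")).add_to(m)
folium.PolyLine([[45.52, -122.68], [45.53, -122.67]]).add_to(m)
m.save("map.html")  # everything reachable from the package root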
bcfdfc0a3a20a9425d561c49102f310487414b7b
|
gwt-polymer-demo/src/main/java/com/vaadin/components/gwt/polymer/client/sampler/gwt/PaperTabsWidgetView.java
|
gwt-polymer-demo/src/main/java/com/vaadin/components/gwt/polymer/client/sampler/gwt/PaperTabsWidgetView.java
|
package com.vaadin.components.gwt.polymer.client.sampler.gwt;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.vaadin.components.gwt.polymer.client.widget.PaperTab;
import com.vaadin.components.gwt.polymer.client.widget.PaperTabs;
import com.vaadin.components.gwt.polymer.client.widget.PaperToast;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEvent;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEventHandler;
public class PaperTabsWidgetView extends Composite {
interface PaperTabsWidgetUiBinder extends UiBinder<HTMLPanel, PaperTabsWidgetView> {
}
private static PaperTabsWidgetUiBinder ourUiBinder = GWT.create(PaperTabsWidgetUiBinder.class);
@UiField
PaperTabs tabs;
@UiField
PaperToast toast;
public PaperTabsWidgetView() {
initWidget(ourUiBinder.createAndBindUi(this));
tabs.setSelected(0);
PaperTab tab = new PaperTab("dynamically created item");
tabs.add(tab);
tabs.addCoreSelectHandler(new CoreSelectEventHandler() {
@Override
public void onCoreSelect(CoreSelectEvent event) {
toast.setText("widget event handler");
toast.show();
}
});
}
}
|
package com.vaadin.components.gwt.polymer.client.sampler.gwt;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.vaadin.components.gwt.polymer.client.widget.PaperTab;
import com.vaadin.components.gwt.polymer.client.widget.PaperTabs;
import com.vaadin.components.gwt.polymer.client.widget.PaperToast;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEvent;
public class PaperTabsWidgetView extends Composite {
interface PaperTabsWidgetUiBinder extends UiBinder<HTMLPanel, PaperTabsWidgetView> {
}
private static PaperTabsWidgetUiBinder ourUiBinder = GWT.create(PaperTabsWidgetUiBinder.class);
@UiField PaperTabs tabs;
@UiField PaperToast toast;
public PaperTabsWidgetView() {
initWidget(ourUiBinder.createAndBindUi(this));
tabs.setSelected(0);
PaperTab tab = new PaperTab("dynamically created item");
tabs.add(tab);
}
@UiHandler("tabs")
void onNameGotPressed(CoreSelectEvent event) {
toast.setText("widget event handler");
toast.show();
}
}
|
Use UiHandler in the example
|
Use UiHandler in the example
|
Java
|
apache-2.0
|
manolo/gwt-polymer-elements,vaadin/gwt-polymer-elements
|
java
|
## Code Before:
package com.vaadin.components.gwt.polymer.client.sampler.gwt;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.vaadin.components.gwt.polymer.client.widget.PaperTab;
import com.vaadin.components.gwt.polymer.client.widget.PaperTabs;
import com.vaadin.components.gwt.polymer.client.widget.PaperToast;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEvent;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEventHandler;
public class PaperTabsWidgetView extends Composite {
interface PaperTabsWidgetUiBinder extends UiBinder<HTMLPanel, PaperTabsWidgetView> {
}
private static PaperTabsWidgetUiBinder ourUiBinder = GWT.create(PaperTabsWidgetUiBinder.class);
@UiField
PaperTabs tabs;
@UiField
PaperToast toast;
public PaperTabsWidgetView() {
initWidget(ourUiBinder.createAndBindUi(this));
tabs.setSelected(0);
PaperTab tab = new PaperTab("dynamically created item");
tabs.add(tab);
tabs.addCoreSelectHandler(new CoreSelectEventHandler() {
@Override
public void onCoreSelect(CoreSelectEvent event) {
toast.setText("widget event handler");
toast.show();
}
});
}
}
## Instruction:
Use UiHandler in the example
## Code After:
package com.vaadin.components.gwt.polymer.client.sampler.gwt;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.vaadin.components.gwt.polymer.client.widget.PaperTab;
import com.vaadin.components.gwt.polymer.client.widget.PaperTabs;
import com.vaadin.components.gwt.polymer.client.widget.PaperToast;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEvent;
public class PaperTabsWidgetView extends Composite {
interface PaperTabsWidgetUiBinder extends UiBinder<HTMLPanel, PaperTabsWidgetView> {
}
private static PaperTabsWidgetUiBinder ourUiBinder = GWT.create(PaperTabsWidgetUiBinder.class);
@UiField PaperTabs tabs;
@UiField PaperToast toast;
public PaperTabsWidgetView() {
initWidget(ourUiBinder.createAndBindUi(this));
tabs.setSelected(0);
PaperTab tab = new PaperTab("dynamically created item");
tabs.add(tab);
}
@UiHandler("tabs")
void onNameGotPressed(CoreSelectEvent event) {
toast.setText("widget event handler");
toast.show();
}
}
|
...
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.vaadin.components.gwt.polymer.client.widget.PaperTab;
...
import com.vaadin.components.gwt.polymer.client.widget.PaperTabs;
import com.vaadin.components.gwt.polymer.client.widget.PaperToast;
import com.vaadin.components.gwt.polymer.client.widget.event.CoreSelectEvent;
public class PaperTabsWidgetView extends Composite {
interface PaperTabsWidgetUiBinder extends UiBinder<HTMLPanel, PaperTabsWidgetView> {
...
}
private static PaperTabsWidgetUiBinder ourUiBinder = GWT.create(PaperTabsWidgetUiBinder.class);
@UiField PaperTabs tabs;
@UiField PaperToast toast;
public PaperTabsWidgetView() {
initWidget(ourUiBinder.createAndBindUi(this));
tabs.setSelected(0);
PaperTab tab = new PaperTab("dynamically created item");
tabs.add(tab);
}
@UiHandler("tabs")
void onNameGotPressed(CoreSelectEvent event) {
toast.setText("widget event handler");
toast.show();
}
}
...
|
50621ef5b141470879a786088391a516b4f63d52
|
note/models.py
|
note/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
Migrate to a custom User class.
|
Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.
|
Python
|
bsd-2-clause
|
LeMeteore/boomer2
|
python
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(User)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
## Instruction:
Migrate to a custom User class.
Step1: reference the User class, using the AUTH_USER_MODEL setting.
## Code After:
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
|
// ... existing code ...
from django.db import models
from django.conf import settings
# Create your models here.
# Create your models here.
class Note(models.Model):
# always reference the User class using setting conf
author = models.ForeignKey(settings.AUTH_USER_MODEL)
value = models.IntegerField(max_length=255)
def __str__(self):
return "your note is %s" % self.value
// ... rest of the code ...
|
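Pointing the foreign key at settings.AUTH_USER_MODEL is what lets the project later swap in a custom user class without touching this model again. A minimal sketch of the companion pieces, assuming a hypothetical accounts app (the app and class names are illustrative, not taken from this project):

# settings.py
AUTH_USER_MODEL = "accounts.User"

# accounts/models.py
from django.contrib.auth.models import AbstractUser

class User(AbstractUser):
    # Start empty; extra profile fields can be added here later
    # without rewriting existing foreign keys.
    pass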
805e67ad540e3072929dea30b8894af87fc622ef
|
uiharu/__init__.py
|
uiharu/__init__.py
|
import logging
from flask import Flask
log = logging.getLogger(__name__)
def create_app(config_dict):
app = Flask(__name__, static_folder=None)
app.config.update(**config_dict)
from uiharu.api.views import api as api_blueprint
from uiharu.weather.views import weather as weather_blueprint
app.register_blueprint(api_blueprint, url_prefix='/api/v1')
app.register_blueprint(weather_blueprint)
log.info(app.url_map)
return app
|
import logging
log = logging.getLogger(__name__)
|
Remove flask usage in init
|
Remove flask usage in init
|
Python
|
mit
|
kennydo/uiharu
|
python
|
## Code Before:
import logging
from flask import Flask
log = logging.getLogger(__name__)
def create_app(config_dict):
app = Flask(__name__, static_folder=None)
app.config.update(**config_dict)
from uiharu.api.views import api as api_blueprint
from uiharu.weather.views import weather as weather_blueprint
app.register_blueprint(api_blueprint, url_prefix='/api/v1')
app.register_blueprint(weather_blueprint)
log.info(app.url_map)
return app
## Instruction:
Remove flask usage in init
## Code After:
import logging
log = logging.getLogger(__name__)
|
// ... existing code ...
import logging
log = logging.getLogger(__name__)
// ... rest of the code ...
|
5e50f8127a48a08d66bdc9d8aec28064b33ad864
|
game.py
|
game.py
|
import datetime
import map_loader
class Game(object):
def __init__(self, name=name, players=players, map=None):
""" Initialize a new game. """
self.name = name,
self.players = players, # List of player usernames
self.status = 'Waiting',
self.raw_state = self.generate_clean_state(), # JSON object
self.created = datetime.datetime.now(),
map = 'default' if map is None else map
self.map = map.read_map_file(map)
def generate_clean_state(self):
""" Generates a blank game state JSON object. """
return '{}'
def load_from_state(self):
""" Load game attributes from raw game state. """
pass
def serialize(self):
""" Turn game into a serialized game state for storage. """
pass
def update(self):
""" Execute a round. """
pass
|
import datetime
import json
import map_loader
class GAME_STATUS(object):
""" Game status constants. """
lobby = 'waiting for players'
waiting = 'waiting for moves'
playing = 'playing'
cancelled = 'cancelled'
complete = 'complete'
class Game(object):
def __init__(self, name=name, players=players, map='default'):
""" Initialize a new game. """
self.name = name,
self.players = players, # List of player usernames
self.status = GAME_STATUS.lobby,
self.created = datetime.datetime.now(),
# These attributes are persisted in the raw_state, not DB properties
self.map = map.read_map_file(map)
self.current_turn = 0
self.max_turns = 0
self.raw_state = self.serialize(), # JSON state (a DB property)
def load_state_from_json(self):
""" Load game attributes from raw game state. """
state = json.loads(self.raw_state)
self.map = state['map']
self.current_turn, self.max_turns = state['turn']
def serialize_state(self):
""" Turn game state into a serialized game state for storage. """
state = {
'map': self.map,
'turn': [self.current_turn, self.max_turns],
}
return json.dumps(state)
def update(self):
""" Execute a round. """
self.current_turn += 1
if self.current_turn == self.max_turns:
self.status = GAME_STATUS.complete
|
Add some state related methods to Game
|
Add some state related methods to Game
|
Python
|
mit
|
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
|
python
|
## Code Before:
import datetime
import map_loader
class Game(object):
def __init__(self, name=name, players=players, map=None):
""" Initialize a new game. """
self.name = name,
self.players = players, # List of player usernames
self.status = 'Waiting',
self.raw_state = self.generate_clean_state(), # JSON object
self.created = datetime.datetime.now(),
map = 'default' if map is None else map
self.map = map.read_map_file(map)
def generate_clean_state(self):
""" Generates a blank game state JSON object. """
return '{}'
def load_from_state(self):
""" Load game attributes from raw game state. """
pass
def serialize(self):
""" Turn game into a serialized game state for storage. """
pass
def update(self):
""" Execute a round. """
pass
## Instruction:
Add some state related methods to Game
## Code After:
import datetime
import json
import map_loader
class GAME_STATUS(object):
""" Game status constants. """
lobby = 'waiting for players'
waiting = 'waiting for moves'
playing = 'playing'
cancelled = 'cancelled'
complete = 'complete'
class Game(object):
def __init__(self, name=name, players=players, map='default'):
""" Initialize a new game. """
self.name = name,
self.players = players, # List of player usernames
self.status = GAME_STATUS.lobby,
self.created = datetime.datetime.now(),
# These attributes are persisted in the raw_state, not DB properties
self.map = map.read_map_file(map)
self.current_turn = 0
self.max_turns = 0
self.raw_state = self.serialize(), # JSON state (a DB property)
def load_state_from_json(self):
""" Load game attributes from raw game state. """
state = json.loads(self.raw_state)
self.map = state['map']
self.current_turn, self.max_turns = state['turn']
def serialize_state(self):
""" Turn game state into a serialized game state for storage. """
state = {
'map': self.map,
'turn': [self.current_turn, self.max_turns],
}
return json.dumps(state)
def update(self):
""" Execute a round. """
self.current_turn += 1
if self.current_turn == self.max_turns:
self.status = GAME_STATUS.complete
|
...
import datetime
import json
import map_loader
class GAME_STATUS(object):
""" Game status constants. """
lobby = 'waiting for players'
waiting = 'waiting for moves'
playing = 'playing'
cancelled = 'cancelled'
complete = 'complete'
class Game(object):
def __init__(self, name=name, players=players, map='default'):
""" Initialize a new game. """
self.name = name,
self.players = players, # List of player usernames
self.status = GAME_STATUS.lobby,
self.created = datetime.datetime.now(),
# These attributes are persisted in the raw_state, not DB properties
self.map = map.read_map_file(map)
self.current_turn = 0
self.max_turns = 0
self.raw_state = self.serialize(), # JSON state (a DB property)
def load_state_from_json(self):
""" Load game attributes from raw game state. """
state = json.loads(self.raw_state)
self.map = state['map']
self.current_turn, self.max_turns = state['turn']
def serialize_state(self):
""" Turn game state into a serialized game state for storage. """
state = {
'map': self.map,
'turn': [self.current_turn, self.max_turns],
}
return json.dumps(state)
def update(self):
""" Execute a round. """
self.current_turn += 1
if self.current_turn == self.max_turns:
self.status = GAME_STATUS.complete
...
|
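The new serialize_state/load_state_from_json pair keeps the map and turn counters in one JSON blob so only raw_state needs to be persisted. A standalone sketch of the same round-trip idea, with names simplified for illustration rather than copied from the project:

import json

def serialize_state(game_map, current_turn, max_turns):
    # Pack everything that must survive a save into a single JSON string.
    return json.dumps({"map": game_map, "turn": [current_turn, max_turns]})

def load_state(raw_state):
    state = json.loads(raw_state)
    return state["map"], state["turn"][0], state["turn"][1]

raw = serialize_state([["."] * 3 for _ in range(3)], 0, 10)
assert load_state(raw) == ([["."] * 3 for _ in range(3)], 0, 10)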
5c7b33574550d37454b4362fa0896a4dad6e98d1
|
aesthetic/output/gif.py
|
aesthetic/output/gif.py
|
from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
image.save(out, save_all=True,
append_images=list(more_images),
loop=1000,
duration=50)
|
from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
delay_ms = 1000 * 0.035
image.save(out, save_all=True,
append_images=list(more_images),
duration=delay_ms, optimize=True)
|
Optimize GIF palette (too many colors right now), better GIF timing options.
|
Optimize GIF palette (too many colors right now), better GIF timing options.
|
Python
|
apache-2.0
|
gnoack/aesthetic
|
python
|
## Code Before:
from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
image.save(out, save_all=True,
append_images=list(more_images),
loop=1000,
duration=50)
## Instruction:
Optimize GIF palette (too many colors right now), better GIF timing options.
## Code After:
from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
delay_ms = 1000 * 0.035
image.save(out, save_all=True,
append_images=list(more_images),
duration=delay_ms, optimize=True)
|
# ... existing code ...
return im
def save_gif(out, image, *more_images):
delay_ms = 1000 * 0.035
image.save(out, save_all=True,
append_images=list(more_images),
duration=delay_ms, optimize=True)
# ... rest of the code ...
|
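Pillow's GIF writer takes the per-frame delay in milliseconds, so the 1000 * 0.035 above amounts to 35 ms per frame (roughly 28.6 fps), and optimize=True trims the palette down to the colors actually used. A small usage sketch on top of the render function above, assuming the module is importable as aesthetic.output.gif and using made-up frame data (53 RGB tuples per frame):

from aesthetic.output.gif import render

animation = [
    [(255, 0, 0)] * 53,   # frame 1: all LEDs red
    [(0, 0, 255)] * 53,   # frame 2: all LEDs blue
]
render(animation, "preview.gif", scale=8)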
b1aac6a0b29a6ed46b77aab37ff52c765a280ec6
|
ikalog/utils/matcher.py
|
ikalog/utils/matcher.py
|
from ikalog.utils.image_filters import *
from ikalog.utils.ikamatcher1 import IkaMatcher1 as IkaMatcher
|
from ikalog.utils.image_filters import *
from ikalog.utils.ikamatcher1 import IkaMatcher1 as IkaMatcher
#from ikalog.utils.ikamatcher2.matcher import IkaMatcher2 as IkaMatcher
|
Switch to IkaMatcher2 (Numpy_8bit_fast); 10% faster in overall performance
|
utils: Switch to IkaMatcher2 (Numpy_8bit_fast); 10% faster in overall performance
Signed-off-by: Takeshi HASEGAWA <[email protected]>
|
Python
|
apache-2.0
|
deathmetalland/IkaLog,hasegaw/IkaLog,deathmetalland/IkaLog,hasegaw/IkaLog,hasegaw/IkaLog,deathmetalland/IkaLog
|
python
|
## Code Before:
from ikalog.utils.image_filters import *
from ikalog.utils.ikamatcher1 import IkaMatcher1 as IkaMatcher
## Instruction:
utils: Switch to IkaMatcher2 (Numpy_8bit_fast); 10% faster in overall performance
Signed-off-by: Takeshi HASEGAWA <[email protected]>
## Code After:
from ikalog.utils.image_filters import *
from ikalog.utils.ikamatcher1 import IkaMatcher1 as IkaMatcher
#from ikalog.utils.ikamatcher2.matcher import IkaMatcher2 as IkaMatcher
|
# ... existing code ...
from ikalog.utils.image_filters import *
from ikalog.utils.ikamatcher1 import IkaMatcher1 as IkaMatcher
#from ikalog.utils.ikamatcher2.matcher import IkaMatcher2 as IkaMatcher
# ... rest of the code ...
|
58000a20fdc803d5bd77059b40a51180f52ce3ed
|
runtime/src/main/kotlin/kotlin/collections/MutableCollections.kt
|
runtime/src/main/kotlin/kotlin/collections/MutableCollections.kt
|
/*
* Copyright 2010-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license
* that can be found in the LICENSE file.
*/
package kotlin.collections
import kotlin.comparisons.*
/**
* Sorts elements in the list in-place according to their natural sort order.
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T : Comparable<T>> MutableList<T>.sort(): Unit = sortWith(Comparator<T> { a: T, b: T -> a.compareTo(b) })
/**
* Sorts elements in the list in-place according to the order specified with [comparator].
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T> MutableList<T>.sortWith(comparator: Comparator<in T>): Unit {
if (size > 1) {
val it = listIterator()
val sortedArray = @Suppress("TYPE_PARAMETER_AS_REIFIED") toTypedArray().apply { sortWith(comparator) }
for (v in sortedArray) {
it.next()
it.set(v)
}
}
}
|
/*
* Copyright 2010-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license
* that can be found in the LICENSE file.
*/
package kotlin.collections
/**
* Sorts elements in the list in-place according to their natural sort order.
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T : Comparable<T>> MutableList<T>.sort(): Unit = sortWith(naturalOrder())
/**
* Sorts elements in the list in-place according to the order specified with [comparator].
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T> MutableList<T>.sortWith(comparator: Comparator<in T>): Unit {
if (size > 1) {
val it = listIterator()
val sortedArray = @Suppress("UNCHECKED_CAST") (toTypedArray<Any?>() as Array<T>).apply { sortWith(comparator) }
for (v in sortedArray) {
it.next()
it.set(v)
}
}
}
|
Rewrite MutableList.sortWith to suppress warning instead of error
|
Rewrite MutableList.sortWith to suppress warning instead of error
Use available naturalOrder comparator in MutableList.sort()
|
Kotlin
|
apache-2.0
|
JetBrains/kotlin-native,wiltonlazary/kotlin-native,wiltonlazary/kotlin-native,wiltonlazary/kotlin-native,wiltonlazary/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native,wiltonlazary/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native,JetBrains/kotlin-native
|
kotlin
|
## Code Before:
/*
* Copyright 2010-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license
* that can be found in the LICENSE file.
*/
package kotlin.collections
import kotlin.comparisons.*
/**
* Sorts elements in the list in-place according to their natural sort order.
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T : Comparable<T>> MutableList<T>.sort(): Unit = sortWith(Comparator<T> { a: T, b: T -> a.compareTo(b) })
/**
* Sorts elements in the list in-place according to the order specified with [comparator].
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T> MutableList<T>.sortWith(comparator: Comparator<in T>): Unit {
if (size > 1) {
val it = listIterator()
val sortedArray = @Suppress("TYPE_PARAMETER_AS_REIFIED") toTypedArray().apply { sortWith(comparator) }
for (v in sortedArray) {
it.next()
it.set(v)
}
}
}
## Instruction:
Rewrite MutableList.sortWith to suppress warning instead of error
Use available naturalOrder comparator in MutableList.sort()
## Code After:
/*
* Copyright 2010-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license
* that can be found in the LICENSE file.
*/
package kotlin.collections
/**
* Sorts elements in the list in-place according to their natural sort order.
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T : Comparable<T>> MutableList<T>.sort(): Unit = sortWith(naturalOrder())
/**
* Sorts elements in the list in-place according to the order specified with [comparator].
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T> MutableList<T>.sortWith(comparator: Comparator<in T>): Unit {
if (size > 1) {
val it = listIterator()
val sortedArray = @Suppress("UNCHECKED_CAST") (toTypedArray<Any?>() as Array<T>).apply { sortWith(comparator) }
for (v in sortedArray) {
it.next()
it.set(v)
}
}
}
|
...
package kotlin.collections
/**
* Sorts elements in the list in-place according to their natural sort order.
*
* The sort is _stable_. It means that equal elements preserve their order relative to each other after sorting.
*/
public actual fun <T : Comparable<T>> MutableList<T>.sort(): Unit = sortWith(naturalOrder())
/**
* Sorts elements in the list in-place according to the order specified with [comparator].
...
public actual fun <T> MutableList<T>.sortWith(comparator: Comparator<in T>): Unit {
if (size > 1) {
val it = listIterator()
val sortedArray = @Suppress("UNCHECKED_CAST") (toTypedArray<Any?>() as Array<T>).apply { sortWith(comparator) }
for (v in sortedArray) {
it.next()
it.set(v)
...
|
de2e3dd947660b4b1222820141c5c7cd66098349
|
django_split/models.py
|
django_split/models.py
|
from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey('auth.User', related_name=None)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
|
from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey(
'auth.User',
related_name='django_split_experiment_groups',
)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
|
Add an explicit related name
|
Add an explicit related name
|
Python
|
mit
|
prophile/django_split
|
python
|
## Code Before:
from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey('auth.User', related_name=None)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
## Instruction:
Add an explicit related name
## Code After:
from django.db import models
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey(
'auth.User',
related_name='django_split_experiment_groups',
)
group = models.IntegerField()
class Meta:
unique_together = (
('experiment', 'user'),
)
class ExperimentState(models.Model):
experiment = models.CharField(max_length=48, primary_key=True)
started = models.DateTimeField(null=True)
completed = models.DateTimeField(null=True)
class ExperimentResult(models.Model):
experiment = models.CharField(max_length=48)
group = models.IntegerField()
metric = models.IntegerField()
percentile = models.IntegerField()
value = models.FloatField()
class Meta:
unique_together = (
('experiment', 'group', 'metric', 'percentile'),
)
|
// ... existing code ...
class ExperimentGroup(models.Model):
experiment = models.CharField(max_length=48)
user = models.ForeignKey(
'auth.User',
related_name='django_split_experiment_groups',
)
group = models.IntegerField()
// ... rest of the code ...
|
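Without an explicit related_name, Django would generate experimentgroup_set on User and risk clashing with another app's default; the explicit name also gives a readable reverse accessor. A small sketch of the lookup this enables (illustrative query, assuming migrations have been applied):

from django.contrib.auth.models import User

user = User.objects.get(username="alice")
# Reverse accessor named by related_name above, instead of the
# auto-generated experimentgroup_set:
groups = user.django_split_experiment_groups.all()
print([(g.experiment, g.group) for g in groups])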
8d94846a4443ed99ccd7877bad0a558c73593441
|
src/shutdowncheck.h
|
src/shutdowncheck.h
|
class ShutdownCheck : public QObject
{
Q_OBJECT
public:
enum Status
{
OK,
FAILED,
Error
};
Q_ENUM(Status)
Q_PROPERTY(int status READ status NOTIFY statusChanged)
explicit ShutdownCheck(QObject* parent = 0);
~ShutdownCheck();
int status() const;
void setStatus(int status);
signals:
void statusChanged();
private:
QString _dataPath;
QString _mark;
int _status;
void writeMark();
void removeMark();
bool markExists();
};
#endif // QAK_SHUTDOWN_CHECK_H
|
class ShutdownCheck : public QObject
{
Q_OBJECT
public:
enum Status
{
OK,
FAILED,
Error
};
Q_ENUM(Status)
Q_PROPERTY(int status READ status NOTIFY statusChanged)
explicit ShutdownCheck(QObject* parent = 0);
~ShutdownCheck();
int status() const;
void setStatus(int status);
public slots:
void writeMark();
void removeMark();
bool markExists();
signals:
void statusChanged();
private:
QString _dataPath;
QString _mark;
int _status;
};
#endif // QAK_SHUTDOWN_CHECK_H
|
Allow custom shutdown check cycles by exposing private functions
|
Allow custom shutdown check cycles by exposing private functions
|
C
|
mit
|
Larpon/qak,Larpon/qak,Larpon/qak
|
c
|
## Code Before:
class ShutdownCheck : public QObject
{
Q_OBJECT
public:
enum Status
{
OK,
FAILED,
Error
};
Q_ENUM(Status)
Q_PROPERTY(int status READ status NOTIFY statusChanged)
explicit ShutdownCheck(QObject* parent = 0);
~ShutdownCheck();
int status() const;
void setStatus(int status);
signals:
void statusChanged();
private:
QString _dataPath;
QString _mark;
int _status;
void writeMark();
void removeMark();
bool markExists();
};
#endif // QAK_SHUTDOWN_CHECK_H
## Instruction:
Allow custom shutdown check cycles by exposing private functions
## Code After:
class ShutdownCheck : public QObject
{
Q_OBJECT
public:
enum Status
{
OK,
FAILED,
Error
};
Q_ENUM(Status)
Q_PROPERTY(int status READ status NOTIFY statusChanged)
explicit ShutdownCheck(QObject* parent = 0);
~ShutdownCheck();
int status() const;
void setStatus(int status);
public slots:
void writeMark();
void removeMark();
bool markExists();
signals:
void statusChanged();
private:
QString _dataPath;
QString _mark;
int _status;
};
#endif // QAK_SHUTDOWN_CHECK_H
|
# ... existing code ...
int status() const;
void setStatus(int status);
public slots:
void writeMark();
void removeMark();
bool markExists();
signals:
void statusChanged();
# ... modified code ...
QString _mark;
int _status;
};
#endif // QAK_SHUTDOWN_CHECK_H
# ... rest of the code ...
|
24b8de9cfdcc36b1cc6001b84430411d32ac58a6
|
setup.py
|
setup.py
|
"""Mailmerge build and install configuration."""
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme_file:
README = readme_file.read()
setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=README,
version="1.9",
author="Andrew DeOrio",
author_email="[email protected]",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"chardet",
"click",
"configparser",
"jinja2",
"future",
"backports.csv;python_version<='2.7'",
"markdown",
],
extras_require={
'dev': [
'pylint',
'pydocstyle',
'pycodestyle',
'pytest',
'tox',
]
},
# Python command line utilities will be installed in a PATH-accessible bin/
entry_points={
'console_scripts': [
'mailmerge = mailmerge.__main__:cli',
]
},
)
|
"""Mailmerge build and install configuration."""
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme_file:
README = readme_file.read()
setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=README,
version="1.9",
author="Andrew DeOrio",
author_email="[email protected]",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"chardet",
"click",
"configparser",
"jinja2",
"future",
"backports.csv;python_version<='2.7'",
"markdown",
],
extras_require={
'dev': [
'pylint',
'pydocstyle',
'pycodestyle',
'pytest',
'tox',
'pdbpp'
]
},
# Python command line utilities will be installed in a PATH-accessible bin/
entry_points={
'console_scripts': [
'mailmerge = mailmerge.__main__:cli',
]
},
)
|
Add pdbpp to dev dependencies
|
Add pdbpp to dev dependencies
|
Python
|
mit
|
awdeorio/mailmerge
|
python
|
## Code Before:
"""Mailmerge build and install configuration."""
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme_file:
README = readme_file.read()
setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=README,
version="1.9",
author="Andrew DeOrio",
author_email="[email protected]",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"chardet",
"click",
"configparser",
"jinja2",
"future",
"backports.csv;python_version<='2.7'",
"markdown",
],
extras_require={
'dev': [
'pylint',
'pydocstyle',
'pycodestyle',
'pytest',
'tox',
]
},
# Python command line utilities will be installed in a PATH-accessible bin/
entry_points={
'console_scripts': [
'mailmerge = mailmerge.__main__:cli',
]
},
)
## Instruction:
Add pdbpp to dev dependencies
## Code After:
"""Mailmerge build and install configuration."""
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme_file:
README = readme_file.read()
setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=README,
version="1.9",
author="Andrew DeOrio",
author_email="[email protected]",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"chardet",
"click",
"configparser",
"jinja2",
"future",
"backports.csv;python_version<='2.7'",
"markdown",
],
extras_require={
'dev': [
'pylint',
'pydocstyle',
'pycodestyle',
'pytest',
'tox',
'pdbpp'
]
},
# Python command line utilities will be installed in a PATH-accessible bin/
entry_points={
'console_scripts': [
'mailmerge = mailmerge.__main__:cli',
]
},
)
|
...
'pycodestyle',
'pytest',
'tox',
'pdbpp'
]
},
...
|
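Because pdbpp sits under the dev extra, it is only pulled in when that extra is requested, for example with pip install -e ".[dev]" from a checkout (the exact command is an assumption, not taken from the project docs). pdbpp registers itself as a drop-in replacement for the standard debugger, so the usual idiom picks it up unchanged:

import pdb

def buggy(x):
    pdb.set_trace()  # drops into pdb++ when pdbpp is installed, plain pdb otherwise
    return x * 2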